Posted to commits@kylin.apache.org by sh...@apache.org on 2016/09/14 08:49:48 UTC

[01/50] [abbrv] kylin git commit: minor, add one more test case [Forced Update!]

Repository: kylin
Updated Branches:
  refs/heads/KYLIN-1726 2e4ec6470 -> 3e081b3fb (forced update)


minor, add one more test case


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/01d033f7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/01d033f7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/01d033f7

Branch: refs/heads/KYLIN-1726
Commit: 01d033f76a058ca0cf7ef81728b7de6204115167
Parents: ddeb745
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Sep 8 11:18:52 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Sep 8 11:18:52 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/CubeCapabilityChecker.java   |  7 ++++++-
 kylin-it/src/test/resources/query/sql_like/query18.sql | 13 +++++++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/01d033f7/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
index 79d1e3b..e8c96b4 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
@@ -31,6 +31,7 @@ import org.apache.kylin.metadata.filter.UDF.MassInTupleFilter;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IStorageAware;
 import org.apache.kylin.metadata.model.MeasureDesc;
+import org.apache.kylin.metadata.model.ParameterDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.CapabilityResult;
 import org.apache.kylin.metadata.realization.SQLDigest;
@@ -146,7 +147,11 @@ public class CubeCapabilityChecker {
             }
 
             // calcite can do aggregation from columns on-the-fly
-            List<TblColRef> neededCols = functionDesc.getParameter().getColRefs();
+            ParameterDesc parameterDesc = functionDesc.getParameter();
+            if (parameterDesc == null) {
+                continue;
+            }
+            List<TblColRef> neededCols = parameterDesc.getColRefs();
             if (neededCols.size() > 0 && cubeDesc.listDimensionColumnsIncludingDerived().containsAll(neededCols) && FunctionDesc.BUILT_IN_AGGREGATIONS.contains(functionDesc.getExpression())) {
                 result.influences.add(new CapabilityResult.DimensionAsMeasure(functionDesc));
                 it.remove();

http://git-wip-us.apache.org/repos/asf/kylin/blob/01d033f7/kylin-it/src/test/resources/query/sql_like/query18.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_like/query18.sql b/kylin-it/src/test/resources/query/sql_like/query18.sql
new file mode 100644
index 0000000..8ef6ad4
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_like/query18.sql
@@ -0,0 +1,13 @@
+
+select USER_DEFINED_FIELD3 as abc
+ 
+ from test_kylin_fact 
+inner JOIN edw.test_cal_dt as test_cal_dt
+ ON test_kylin_fact.cal_dt = test_cal_dt.cal_dt
+ inner JOIN test_category_groupings
+ ON test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id AND test_kylin_fact.lstg_site_id = test_category_groupings.site_id
+ inner JOIN edw.test_sites as test_sites
+ ON test_kylin_fact.lstg_site_id = test_sites.site_id
+ 
+ 
+where upper(USER_DEFINED_FIELD3) like '%VID%'
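
A closer look at the guard added to CubeCapabilityChecker above: before this change, functionDesc.getParameter().getColRefs() would throw a NullPointerException for an aggregation whose FunctionDesc carries no ParameterDesc; the fix fetches the parameter once and simply skips such functions. Below is a minimal, self-contained sketch of the same pattern, using hypothetical simplified types (ParamSketch, FuncSketch) rather than Kylin's real FunctionDesc/ParameterDesc classes.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

// Hypothetical stand-ins for Kylin's ParameterDesc / FunctionDesc, for illustration only.
class ParamSketch {
    private final List<String> colRefs;
    ParamSketch(List<String> colRefs) { this.colRefs = colRefs; }
    List<String> getColRefs() { return colRefs; }
}

class FuncSketch {
    private final ParamSketch parameter; // may legitimately be null
    FuncSketch(ParamSketch parameter) { this.parameter = parameter; }
    ParamSketch getParameter() { return parameter; }
}

public class NullGuardSketch {
    // Mirrors the guard in the diff: fetch the parameter once and skip the function
    // entirely when it has no parameter, instead of dereferencing null.
    static void dropFunctionsAnswerableFromDimensions(List<FuncSketch> unmatched, List<String> dimensionCols) {
        Iterator<FuncSketch> it = unmatched.iterator();
        while (it.hasNext()) {
            ParamSketch parameter = it.next().getParameter();
            if (parameter == null) {
                continue; // nothing to resolve for a parameter-less function
            }
            List<String> neededCols = parameter.getColRefs();
            if (!neededCols.isEmpty() && dimensionCols.containsAll(neededCols)) {
                it.remove(); // answerable on the fly from dimension columns, no longer unmatched
            }
        }
    }

    public static void main(String[] args) {
        List<FuncSketch> funcs = new ArrayList<>(Arrays.asList(
                new FuncSketch(null),                                       // parameter-less: must not NPE
                new FuncSketch(new ParamSketch(Arrays.asList("CAL_DT"))))); // covered by a dimension column
        dropFunctionsAnswerableFromDimensions(funcs, Arrays.asList("CAL_DT", "SITE_ID"));
        System.out.println(funcs.size()); // 1: the null-parameter function is kept, the other removed
    }
}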


[24/50] [abbrv] kylin git commit: KYLIN-1827 Send mail notification when a runtime exception is thrown during cube build/merge

Posted by sh...@apache.org.
KYLIN-1827 Send mail notification when a runtime exception is thrown during cube build/merge

Signed-off-by: shaofengshi <sh...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0954176a
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0954176a
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0954176a

Branch: refs/heads/KYLIN-1726
Commit: 0954176adb50df25d0995a7fbeb68e64ee7aed79
Parents: c59d63d
Author: Ma,Gang <ga...@ebay.com>
Authored: Fri Jul 15 11:27:40 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Sep 12 11:23:19 2016 +0800

----------------------------------------------------------------------
 .../kylin/job/execution/AbstractExecutable.java | 123 ++++++++++++++-----
 .../org/apache/kylin/engine/mr/CubingJob.java   |  40 ++++++
 2 files changed, 129 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0954176a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index 09f9b54..1eee5da 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -30,6 +30,7 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.MailService;
 import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.exception.PersistentException;
 import org.apache.kylin.job.impl.threadpool.DefaultContext;
 import org.apache.kylin.job.manager.ExecutableManager;
 import org.slf4j.Logger;
@@ -99,31 +100,62 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
 
         Preconditions.checkArgument(executableContext instanceof DefaultContext);
         ExecuteResult result = null;
+        try {
+            onExecuteStart(executableContext);
+            Throwable exception;
+            do {
+                if (retry > 0) {
+                    logger.info("Retry " + retry);
+                }
+                exception = null;
+                result = null;
+                try {
+                    result = doWork(executableContext);
+                } catch (Throwable e) {
+                    logger.error("error running Executable: " + this.toString());
+                    exception = e;
+                }
+                retry++;
+            } while (((result != null && result.succeed() == false) || exception != null) && needRetry() == true);
 
-        onExecuteStart(executableContext);
-        Throwable exception;
-        do {
-            if (retry > 0) {
-                logger.info("Retry " + retry);
+            if (exception != null) {
+                onExecuteError(exception, executableContext);
+                throw new ExecuteException(exception);
             }
-            exception = null;
-            result = null;
-            try {
-                result = doWork(executableContext);
-            } catch (Throwable e) {
-                logger.error("error running Executable: " + this.toString());
-                exception = e;
+
+            onExecuteFinished(result, executableContext);
+        } catch (Exception e) {
+            if (isMetaDataPersistException(e)){
+                handleMetaDataPersistException(e);
+            }
+            if (e instanceof ExecuteException){
+                throw e;
+            } else {
+                throw new ExecuteException(e);
             }
-            retry++;
-        } while (((result != null && result.succeed() == false) || exception != null) && needRetry() == true);
+        }
+        return result;
+    }
+
+    protected void handleMetaDataPersistException(Exception e) {
+        // do nothing.
+    }
 
-        if (exception != null) {
-            onExecuteError(exception, executableContext);
-            throw new ExecuteException(exception);
+    private boolean isMetaDataPersistException(Exception e) {
+        if (e instanceof PersistentException){
+            return true;
         }
 
-        onExecuteFinished(result, executableContext);
-        return result;
+        Throwable t = e.getCause();
+        int depth = 0;
+        while (t!= null && depth<5) {
+            depth ++;
+            if (t instanceof PersistentException){
+                return true;
+            }
+            t = t.getCause();
+        }
+        return false;
     }
 
     protected abstract ExecuteResult doWork(ExecutableContext context) throws ExecuteException;
@@ -209,29 +241,52 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
 
     protected final void notifyUserStatusChange(ExecutableContext context, ExecutableState state) {
         try {
-            List<String> users = Lists.newArrayList();
-            users.addAll(getNotifyList());
             final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            final String[] adminDls = kylinConfig.getAdminDls();
-            if (null != adminDls) {
-                for (String adminDl : adminDls) {
-                    users.add(adminDl);
-                }
-            }
+            List<String> users =  getAllNofifyUsers(kylinConfig);
             if (users.isEmpty()) {
                 logger.warn("no need to send email, user list is empty");
                 return;
             }
             final Pair<String, String> email = formatNotifications(context, state);
-            if (email == null) {
-                logger.warn("no need to send email, content is null");
+            doSendMail(kylinConfig,users,email);
+        } catch (Exception e) {
+            logger.error("error send email", e);
+        }
+    }
+
+    private List<String> getAllNofifyUsers(KylinConfig kylinConfig){
+        List<String> users = Lists.newArrayList();
+        users.addAll(getNotifyList());
+        final String[] adminDls = kylinConfig.getAdminDls();
+        if (null != adminDls) {
+            for (String adminDl : adminDls) {
+                users.add(adminDl);
+            }
+        }
+        return users;
+    }
+
+    private void doSendMail(KylinConfig kylinConfig, List<String> users, Pair<String,String> email){
+        if (email == null) {
+            logger.warn("no need to send email, content is null");
+            return;
+        }
+        logger.info("prepare to send email to:" + users);
+        logger.info("job name:" + getName());
+        logger.info("submitter:" + getSubmitter());
+        logger.info("notify list:" + users);
+        new MailService(kylinConfig).sendMail(users, email.getLeft(), email.getRight());
+    }
+
+    protected void sendMail(Pair<String, String> email) {
+        try {
+            final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+            List<String> users = getAllNofifyUsers(kylinConfig);
+            if (users.isEmpty()) {
+                logger.warn("no need to send email, user list is empty");
                 return;
             }
-            logger.info("prepare to send email to:" + users);
-            logger.info("job name:" + getName());
-            logger.info("submitter:" + getSubmitter());
-            logger.info("notify list:" + users);
-            new MailService(kylinConfig).sendMail(users, email.getLeft(), email.getRight());
+            doSendMail(kylinConfig, users, email);
         } catch (Exception e) {
             logger.error("error send email", e);
         }
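
The isMetaDataPersistException helper in the diff above walks the exception's cause chain, at most five links deep, looking for a PersistentException before deciding how to report the failure. The following is a self-contained sketch of that depth-limited cause-chain walk, generalized to an arbitrary target type; the class and method names here are illustrative, not Kylin's.

public final class CauseChainSketch {

    // Returns true if e itself, or any cause within maxDepth links of the cause chain,
    // is an instance of target. Mirrors the depth-limited walk in isMetaDataPersistException.
    static boolean hasCause(Throwable e, Class<? extends Throwable> target, int maxDepth) {
        if (target.isInstance(e)) {
            return true;
        }
        Throwable t = e.getCause();
        int depth = 0;
        while (t != null && depth < maxDepth) {
            depth++;
            if (target.isInstance(t)) {
                return true;
            }
            t = t.getCause();
        }
        return false;
    }

    public static void main(String[] args) {
        Exception persist = new IllegalStateException("metadata store unavailable");
        Exception wrapped = new RuntimeException(new RuntimeException(persist));
        // Prints true: the IllegalStateException is found two links down the cause chain.
        System.out.println(hasCause(wrapped, IllegalStateException.class, 5));
    }
}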

http://git-wip-us.apache.org/repos/asf/kylin/blob/0954176a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
index c9ffe14..9c7f57a 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.engine.mr;
 
+import java.io.PrintWriter;
+import java.io.StringWriter;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.text.SimpleDateFormat;
@@ -183,6 +185,44 @@ public class CubingJob extends DefaultChainedExecutable {
         super.onExecuteFinished(result, executableContext);
     }
 
+    /**
+     * Build failed because the metadata store has a problem.
+     * @param exception
+     */
+    @Override
+    protected void handleMetaDataPersistException(Exception exception) {
+        String title = "[ERROR] - [" + getDeployEnvName() + "] - [" + getProjectName() + "] - " + CubingExecutableUtil.getCubeName(this.getParams());
+        String content = ExecutableConstants.NOTIFY_EMAIL_TEMPLATE;
+        final String UNKNOWN = "UNKNOWN";
+        String errMsg = null;
+        if (exception != null) {
+            final StringWriter out = new StringWriter();
+            exception.printStackTrace(new PrintWriter(out));
+            errMsg = out.toString();
+        }
+
+        content = content.replaceAll("\\$\\{job_name\\}", getName());
+        content = content.replaceAll("\\$\\{result\\}", ExecutableState.ERROR.toString());
+        content = content.replaceAll("\\$\\{env_name\\}", getDeployEnvName());
+        content = content.replaceAll("\\$\\{project_name\\}", getProjectName());
+        content = content.replaceAll("\\$\\{cube_name\\}", CubingExecutableUtil.getCubeName(this.getParams()));
+        content = content.replaceAll("\\$\\{source_records_count\\}", UNKNOWN);
+        content = content.replaceAll("\\$\\{start_time\\}", UNKNOWN);
+        content = content.replaceAll("\\$\\{duration\\}", UNKNOWN);
+        content = content.replaceAll("\\$\\{mr_waiting\\}", UNKNOWN);
+        content = content.replaceAll("\\$\\{last_update_time\\}", UNKNOWN);
+        content = content.replaceAll("\\$\\{submitter\\}", StringUtil.noBlank(getSubmitter(), "missing submitter"));
+        content = content.replaceAll("\\$\\{error_log\\}", Matcher.quoteReplacement(StringUtil.noBlank(errMsg, "no error message")));
+
+        try {
+            InetAddress inetAddress = InetAddress.getLocalHost();
+            content = content.replaceAll("\\$\\{job_engine\\}", inetAddress.getCanonicalHostName());
+        } catch (UnknownHostException e) {
+            logger.warn(e.getLocalizedMessage(), e);
+        }
+        sendMail(Pair.of(title,content));
+    }
+
     public long getMapReduceWaitTime() {
         return getExtraInfoAsLong(MAP_REDUCE_WAIT_TIME, 0L);
     }
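
A note on how handleMetaDataPersistException above builds the mail body: each ${...} placeholder in the template is substituted with String.replaceAll, and the captured stack trace is wrapped in Matcher.quoteReplacement so that any '$' or '\' characters in the error text are not misread as regex replacement groups or escapes. Here is a minimal runnable sketch of that substitution; the miniature template and the job name are made-up stand-ins for ExecutableConstants.NOTIFY_EMAIL_TEMPLATE and the real job metadata.

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.regex.Matcher;

public class MailTemplateSketch {
    public static void main(String[] args) {
        // Made-up miniature template; the real NOTIFY_EMAIL_TEMPLATE carries many more placeholders.
        String content = "Job: ${job_name}\nResult: ${result}\nError log:\n${error_log}";

        // Render the exception to text the same way handleMetaDataPersistException does.
        StringWriter out = new StringWriter();
        new RuntimeException("metadata store down").printStackTrace(new PrintWriter(out));
        String errMsg = out.toString();

        content = content.replaceAll("\\$\\{job_name\\}", "BUILD CUBE - demo_cube"); // example value
        content = content.replaceAll("\\$\\{result\\}", "ERROR");
        // quoteReplacement prevents '$' or '\' in the stack trace from being treated
        // as replacement-group syntax by replaceAll.
        content = content.replaceAll("\\$\\{error_log\\}", Matcher.quoteReplacement(errMsg));

        System.out.println(content);
    }
}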


[09/50] [abbrv] kylin git commit: KYLIN-2005 Move all storage side behavior hints to GTScanRequest

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
index d9cef88..b0688b7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
@@ -11,115 +11,64 @@ public final class CubeVisitProtos {
   public interface CubeVisitRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
-    // required string behavior = 1;
+    // required bytes gtScanRequest = 1;
     /**
-     * <code>required string behavior = 1;</code>
-     */
-    boolean hasBehavior();
-    /**
-     * <code>required string behavior = 1;</code>
-     */
-    java.lang.String getBehavior();
-    /**
-     * <code>required string behavior = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getBehaviorBytes();
-
-    // required bytes gtScanRequest = 2;
-    /**
-     * <code>required bytes gtScanRequest = 2;</code>
+     * <code>required bytes gtScanRequest = 1;</code>
      */
     boolean hasGtScanRequest();
     /**
-     * <code>required bytes gtScanRequest = 2;</code>
+     * <code>required bytes gtScanRequest = 1;</code>
      */
     com.google.protobuf.ByteString getGtScanRequest();
 
-    // required bytes hbaseRawScan = 3;
+    // required bytes hbaseRawScan = 2;
     /**
-     * <code>required bytes hbaseRawScan = 3;</code>
+     * <code>required bytes hbaseRawScan = 2;</code>
      */
     boolean hasHbaseRawScan();
     /**
-     * <code>required bytes hbaseRawScan = 3;</code>
+     * <code>required bytes hbaseRawScan = 2;</code>
      */
     com.google.protobuf.ByteString getHbaseRawScan();
 
-    // required int32 rowkeyPreambleSize = 4;
+    // required int32 rowkeyPreambleSize = 3;
     /**
-     * <code>required int32 rowkeyPreambleSize = 4;</code>
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
      */
     boolean hasRowkeyPreambleSize();
     /**
-     * <code>required int32 rowkeyPreambleSize = 4;</code>
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
      */
     int getRowkeyPreambleSize();
 
-    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;
+    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> 
         getHbaseColumnsToGTList();
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(int index);
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     int getHbaseColumnsToGTCount();
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> 
         getHbaseColumnsToGTOrBuilderList();
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
         int index);
 
-    // required int64 startTime = 6;
-    /**
-     * <code>required int64 startTime = 6;</code>
-     *
-     * <pre>
-     *when client start the request
-     * </pre>
-     */
-    boolean hasStartTime();
-    /**
-     * <code>required int64 startTime = 6;</code>
-     *
-     * <pre>
-     *when client start the request
-     * </pre>
-     */
-    long getStartTime();
-
-    // required int64 timeout = 7;
+    // required string kylinProperties = 5;
     /**
-     * <code>required int64 timeout = 7;</code>
-     *
-     * <pre>
-     *how long client will wait
-     * </pre>
-     */
-    boolean hasTimeout();
-    /**
-     * <code>required int64 timeout = 7;</code>
-     *
-     * <pre>
-     *how long client will wait
-     * </pre>
-     */
-    long getTimeout();
-
-    // required string kylinProperties = 8;
-    /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
@@ -127,7 +76,7 @@ public final class CubeVisitProtos {
      */
     boolean hasKylinProperties();
     /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
@@ -135,7 +84,7 @@ public final class CubeVisitProtos {
      */
     java.lang.String getKylinProperties();
     /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
@@ -197,44 +146,29 @@ public final class CubeVisitProtos {
             }
             case 10: {
               bitField0_ |= 0x00000001;
-              behavior_ = input.readBytes();
+              gtScanRequest_ = input.readBytes();
               break;
             }
             case 18: {
               bitField0_ |= 0x00000002;
-              gtScanRequest_ = input.readBytes();
-              break;
-            }
-            case 26: {
-              bitField0_ |= 0x00000004;
               hbaseRawScan_ = input.readBytes();
               break;
             }
-            case 32: {
-              bitField0_ |= 0x00000008;
+            case 24: {
+              bitField0_ |= 0x00000004;
               rowkeyPreambleSize_ = input.readInt32();
               break;
             }
-            case 42: {
-              if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+            case 34: {
+              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                 hbaseColumnsToGT_ = new java.util.ArrayList<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList>();
-                mutable_bitField0_ |= 0x00000010;
+                mutable_bitField0_ |= 0x00000008;
               }
               hbaseColumnsToGT_.add(input.readMessage(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.PARSER, extensionRegistry));
               break;
             }
-            case 48: {
-              bitField0_ |= 0x00000010;
-              startTime_ = input.readInt64();
-              break;
-            }
-            case 56: {
-              bitField0_ |= 0x00000020;
-              timeout_ = input.readInt64();
-              break;
-            }
-            case 66: {
-              bitField0_ |= 0x00000040;
+            case 42: {
+              bitField0_ |= 0x00000008;
               kylinProperties_ = input.readBytes();
               break;
             }
@@ -246,7 +180,7 @@ public final class CubeVisitProtos {
         throw new com.google.protobuf.InvalidProtocolBufferException(
             e.getMessage()).setUnfinishedMessage(this);
       } finally {
-        if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
+        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
           hbaseColumnsToGT_ = java.util.Collections.unmodifiableList(hbaseColumnsToGT_);
         }
         this.unknownFields = unknownFields.build();
@@ -785,196 +719,105 @@ public final class CubeVisitProtos {
     }
 
     private int bitField0_;
-    // required string behavior = 1;
-    public static final int BEHAVIOR_FIELD_NUMBER = 1;
-    private java.lang.Object behavior_;
-    /**
-     * <code>required string behavior = 1;</code>
-     */
-    public boolean hasBehavior() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>required string behavior = 1;</code>
-     */
-    public java.lang.String getBehavior() {
-      java.lang.Object ref = behavior_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          behavior_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string behavior = 1;</code>
-     */
-    public com.google.protobuf.ByteString
-        getBehaviorBytes() {
-      java.lang.Object ref = behavior_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        behavior_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // required bytes gtScanRequest = 2;
-    public static final int GTSCANREQUEST_FIELD_NUMBER = 2;
+    // required bytes gtScanRequest = 1;
+    public static final int GTSCANREQUEST_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString gtScanRequest_;
     /**
-     * <code>required bytes gtScanRequest = 2;</code>
+     * <code>required bytes gtScanRequest = 1;</code>
      */
     public boolean hasGtScanRequest() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
+      return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * <code>required bytes gtScanRequest = 2;</code>
+     * <code>required bytes gtScanRequest = 1;</code>
      */
     public com.google.protobuf.ByteString getGtScanRequest() {
       return gtScanRequest_;
     }
 
-    // required bytes hbaseRawScan = 3;
-    public static final int HBASERAWSCAN_FIELD_NUMBER = 3;
+    // required bytes hbaseRawScan = 2;
+    public static final int HBASERAWSCAN_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString hbaseRawScan_;
     /**
-     * <code>required bytes hbaseRawScan = 3;</code>
+     * <code>required bytes hbaseRawScan = 2;</code>
      */
     public boolean hasHbaseRawScan() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
+      return ((bitField0_ & 0x00000002) == 0x00000002);
     }
     /**
-     * <code>required bytes hbaseRawScan = 3;</code>
+     * <code>required bytes hbaseRawScan = 2;</code>
      */
     public com.google.protobuf.ByteString getHbaseRawScan() {
       return hbaseRawScan_;
     }
 
-    // required int32 rowkeyPreambleSize = 4;
-    public static final int ROWKEYPREAMBLESIZE_FIELD_NUMBER = 4;
+    // required int32 rowkeyPreambleSize = 3;
+    public static final int ROWKEYPREAMBLESIZE_FIELD_NUMBER = 3;
     private int rowkeyPreambleSize_;
     /**
-     * <code>required int32 rowkeyPreambleSize = 4;</code>
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
      */
     public boolean hasRowkeyPreambleSize() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
+      return ((bitField0_ & 0x00000004) == 0x00000004);
     }
     /**
-     * <code>required int32 rowkeyPreambleSize = 4;</code>
+     * <code>required int32 rowkeyPreambleSize = 3;</code>
      */
     public int getRowkeyPreambleSize() {
       return rowkeyPreambleSize_;
     }
 
-    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;
-    public static final int HBASECOLUMNSTOGT_FIELD_NUMBER = 5;
+    // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
+    public static final int HBASECOLUMNSTOGT_FIELD_NUMBER = 4;
     private java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> hbaseColumnsToGT_;
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     public java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> getHbaseColumnsToGTList() {
       return hbaseColumnsToGT_;
     }
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     public java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> 
         getHbaseColumnsToGTOrBuilderList() {
       return hbaseColumnsToGT_;
     }
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     public int getHbaseColumnsToGTCount() {
       return hbaseColumnsToGT_.size();
     }
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(int index) {
       return hbaseColumnsToGT_.get(index);
     }
     /**
-     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+     * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
      */
     public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
         int index) {
       return hbaseColumnsToGT_.get(index);
     }
 
-    // required int64 startTime = 6;
-    public static final int STARTTIME_FIELD_NUMBER = 6;
-    private long startTime_;
-    /**
-     * <code>required int64 startTime = 6;</code>
-     *
-     * <pre>
-     *when client start the request
-     * </pre>
-     */
-    public boolean hasStartTime() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
-    }
-    /**
-     * <code>required int64 startTime = 6;</code>
-     *
-     * <pre>
-     *when client start the request
-     * </pre>
-     */
-    public long getStartTime() {
-      return startTime_;
-    }
-
-    // required int64 timeout = 7;
-    public static final int TIMEOUT_FIELD_NUMBER = 7;
-    private long timeout_;
-    /**
-     * <code>required int64 timeout = 7;</code>
-     *
-     * <pre>
-     *how long client will wait
-     * </pre>
-     */
-    public boolean hasTimeout() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
-    }
-    /**
-     * <code>required int64 timeout = 7;</code>
-     *
-     * <pre>
-     *how long client will wait
-     * </pre>
-     */
-    public long getTimeout() {
-      return timeout_;
-    }
-
-    // required string kylinProperties = 8;
-    public static final int KYLINPROPERTIES_FIELD_NUMBER = 8;
+    // required string kylinProperties = 5;
+    public static final int KYLINPROPERTIES_FIELD_NUMBER = 5;
     private java.lang.Object kylinProperties_;
     /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
      * </pre>
      */
     public boolean hasKylinProperties() {
-      return ((bitField0_ & 0x00000040) == 0x00000040);
+      return ((bitField0_ & 0x00000008) == 0x00000008);
     }
     /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
@@ -995,7 +838,7 @@ public final class CubeVisitProtos {
       }
     }
     /**
-     * <code>required string kylinProperties = 8;</code>
+     * <code>required string kylinProperties = 5;</code>
      *
      * <pre>
      * kylin properties
@@ -1016,13 +859,10 @@ public final class CubeVisitProtos {
     }
 
     private void initFields() {
-      behavior_ = "";
       gtScanRequest_ = com.google.protobuf.ByteString.EMPTY;
       hbaseRawScan_ = com.google.protobuf.ByteString.EMPTY;
       rowkeyPreambleSize_ = 0;
       hbaseColumnsToGT_ = java.util.Collections.emptyList();
-      startTime_ = 0L;
-      timeout_ = 0L;
       kylinProperties_ = "";
     }
     private byte memoizedIsInitialized = -1;
@@ -1030,10 +870,6 @@ public final class CubeVisitProtos {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
 
-      if (!hasBehavior()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
       if (!hasGtScanRequest()) {
         memoizedIsInitialized = 0;
         return false;
@@ -1046,14 +882,6 @@ public final class CubeVisitProtos {
         memoizedIsInitialized = 0;
         return false;
       }
-      if (!hasStartTime()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      if (!hasTimeout()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
       if (!hasKylinProperties()) {
         memoizedIsInitialized = 0;
         return false;
@@ -1066,28 +894,19 @@ public final class CubeVisitProtos {
                         throws java.io.IOException {
       getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getBehaviorBytes());
+        output.writeBytes(1, gtScanRequest_);
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeBytes(2, gtScanRequest_);
+        output.writeBytes(2, hbaseRawScan_);
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeBytes(3, hbaseRawScan_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeInt32(4, rowkeyPreambleSize_);
+        output.writeInt32(3, rowkeyPreambleSize_);
       }
       for (int i = 0; i < hbaseColumnsToGT_.size(); i++) {
-        output.writeMessage(5, hbaseColumnsToGT_.get(i));
-      }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeInt64(6, startTime_);
+        output.writeMessage(4, hbaseColumnsToGT_.get(i));
       }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeInt64(7, timeout_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        output.writeBytes(8, getKylinPropertiesBytes());
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeBytes(5, getKylinPropertiesBytes());
       }
       getUnknownFields().writeTo(output);
     }
@@ -1100,35 +919,23 @@ public final class CubeVisitProtos {
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getBehaviorBytes());
+          .computeBytesSize(1, gtScanRequest_);
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(2, gtScanRequest_);
+          .computeBytesSize(2, hbaseRawScan_);
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(3, hbaseRawScan_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(4, rowkeyPreambleSize_);
+          .computeInt32Size(3, rowkeyPreambleSize_);
       }
       for (int i = 0; i < hbaseColumnsToGT_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(5, hbaseColumnsToGT_.get(i));
+          .computeMessageSize(4, hbaseColumnsToGT_.get(i));
       }
-      if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(6, startTime_);
-      }
-      if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt64Size(7, timeout_);
-      }
-      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(8, getKylinPropertiesBytes());
+          .computeBytesSize(5, getKylinPropertiesBytes());
       }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
@@ -1153,11 +960,6 @@ public final class CubeVisitProtos {
       org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest other = (org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest) obj;
 
       boolean result = true;
-      result = result && (hasBehavior() == other.hasBehavior());
-      if (hasBehavior()) {
-        result = result && getBehavior()
-            .equals(other.getBehavior());
-      }
       result = result && (hasGtScanRequest() == other.hasGtScanRequest());
       if (hasGtScanRequest()) {
         result = result && getGtScanRequest()
@@ -1175,16 +977,6 @@ public final class CubeVisitProtos {
       }
       result = result && getHbaseColumnsToGTList()
           .equals(other.getHbaseColumnsToGTList());
-      result = result && (hasStartTime() == other.hasStartTime());
-      if (hasStartTime()) {
-        result = result && (getStartTime()
-            == other.getStartTime());
-      }
-      result = result && (hasTimeout() == other.hasTimeout());
-      if (hasTimeout()) {
-        result = result && (getTimeout()
-            == other.getTimeout());
-      }
       result = result && (hasKylinProperties() == other.hasKylinProperties());
       if (hasKylinProperties()) {
         result = result && getKylinProperties()
@@ -1203,10 +995,6 @@ public final class CubeVisitProtos {
       }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasBehavior()) {
-        hash = (37 * hash) + BEHAVIOR_FIELD_NUMBER;
-        hash = (53 * hash) + getBehavior().hashCode();
-      }
       if (hasGtScanRequest()) {
         hash = (37 * hash) + GTSCANREQUEST_FIELD_NUMBER;
         hash = (53 * hash) + getGtScanRequest().hashCode();
@@ -1223,14 +1011,6 @@ public final class CubeVisitProtos {
         hash = (37 * hash) + HBASECOLUMNSTOGT_FIELD_NUMBER;
         hash = (53 * hash) + getHbaseColumnsToGTList().hashCode();
       }
-      if (hasStartTime()) {
-        hash = (37 * hash) + STARTTIME_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getStartTime());
-      }
-      if (hasTimeout()) {
-        hash = (37 * hash) + TIMEOUT_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getTimeout());
-      }
       if (hasKylinProperties()) {
         hash = (37 * hash) + KYLINPROPERTIES_FIELD_NUMBER;
         hash = (53 * hash) + getKylinProperties().hashCode();
@@ -1345,26 +1125,20 @@ public final class CubeVisitProtos {
 
       public Builder clear() {
         super.clear();
-        behavior_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
         gtScanRequest_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000002);
+        bitField0_ = (bitField0_ & ~0x00000001);
         hbaseRawScan_ = com.google.protobuf.ByteString.EMPTY;
-        bitField0_ = (bitField0_ & ~0x00000004);
+        bitField0_ = (bitField0_ & ~0x00000002);
         rowkeyPreambleSize_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000008);
+        bitField0_ = (bitField0_ & ~0x00000004);
         if (hbaseColumnsToGTBuilder_ == null) {
           hbaseColumnsToGT_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000010);
+          bitField0_ = (bitField0_ & ~0x00000008);
         } else {
           hbaseColumnsToGTBuilder_.clear();
         }
-        startTime_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000020);
-        timeout_ = 0L;
-        bitField0_ = (bitField0_ & ~0x00000040);
         kylinProperties_ = "";
-        bitField0_ = (bitField0_ & ~0x00000080);
+        bitField0_ = (bitField0_ & ~0x00000010);
         return this;
       }
 
@@ -1396,38 +1170,26 @@ public final class CubeVisitProtos {
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
           to_bitField0_ |= 0x00000001;
         }
-        result.behavior_ = behavior_;
+        result.gtScanRequest_ = gtScanRequest_;
         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
           to_bitField0_ |= 0x00000002;
         }
-        result.gtScanRequest_ = gtScanRequest_;
+        result.hbaseRawScan_ = hbaseRawScan_;
         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
           to_bitField0_ |= 0x00000004;
         }
-        result.hbaseRawScan_ = hbaseRawScan_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
         result.rowkeyPreambleSize_ = rowkeyPreambleSize_;
         if (hbaseColumnsToGTBuilder_ == null) {
-          if (((bitField0_ & 0x00000010) == 0x00000010)) {
+          if (((bitField0_ & 0x00000008) == 0x00000008)) {
             hbaseColumnsToGT_ = java.util.Collections.unmodifiableList(hbaseColumnsToGT_);
-            bitField0_ = (bitField0_ & ~0x00000010);
+            bitField0_ = (bitField0_ & ~0x00000008);
           }
           result.hbaseColumnsToGT_ = hbaseColumnsToGT_;
         } else {
           result.hbaseColumnsToGT_ = hbaseColumnsToGTBuilder_.build();
         }
-        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
-          to_bitField0_ |= 0x00000010;
-        }
-        result.startTime_ = startTime_;
-        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
-          to_bitField0_ |= 0x00000020;
-        }
-        result.timeout_ = timeout_;
-        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
-          to_bitField0_ |= 0x00000040;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000008;
         }
         result.kylinProperties_ = kylinProperties_;
         result.bitField0_ = to_bitField0_;
@@ -1446,11 +1208,6 @@ public final class CubeVisitProtos {
 
       public Builder mergeFrom(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest other) {
         if (other == org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.getDefaultInstance()) return this;
-        if (other.hasBehavior()) {
-          bitField0_ |= 0x00000001;
-          behavior_ = other.behavior_;
-          onChanged();
-        }
         if (other.hasGtScanRequest()) {
           setGtScanRequest(other.getGtScanRequest());
         }
@@ -1464,7 +1221,7 @@ public final class CubeVisitProtos {
           if (!other.hbaseColumnsToGT_.isEmpty()) {
             if (hbaseColumnsToGT_.isEmpty()) {
               hbaseColumnsToGT_ = other.hbaseColumnsToGT_;
-              bitField0_ = (bitField0_ & ~0x00000010);
+              bitField0_ = (bitField0_ & ~0x00000008);
             } else {
               ensureHbaseColumnsToGTIsMutable();
               hbaseColumnsToGT_.addAll(other.hbaseColumnsToGT_);
@@ -1477,7 +1234,7 @@ public final class CubeVisitProtos {
               hbaseColumnsToGTBuilder_.dispose();
               hbaseColumnsToGTBuilder_ = null;
               hbaseColumnsToGT_ = other.hbaseColumnsToGT_;
-              bitField0_ = (bitField0_ & ~0x00000010);
+              bitField0_ = (bitField0_ & ~0x00000008);
               hbaseColumnsToGTBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getHbaseColumnsToGTFieldBuilder() : null;
@@ -1486,14 +1243,8 @@ public final class CubeVisitProtos {
             }
           }
         }
-        if (other.hasStartTime()) {
-          setStartTime(other.getStartTime());
-        }
-        if (other.hasTimeout()) {
-          setTimeout(other.getTimeout());
-        }
         if (other.hasKylinProperties()) {
-          bitField0_ |= 0x00000080;
+          bitField0_ |= 0x00000010;
           kylinProperties_ = other.kylinProperties_;
           onChanged();
         }
@@ -1502,10 +1253,6 @@ public final class CubeVisitProtos {
       }
 
       public final boolean isInitialized() {
-        if (!hasBehavior()) {
-          
-          return false;
-        }
         if (!hasGtScanRequest()) {
           
           return false;
@@ -1518,14 +1265,6 @@ public final class CubeVisitProtos {
           
           return false;
         }
-        if (!hasStartTime()) {
-          
-          return false;
-        }
-        if (!hasTimeout()) {
-          
-          return false;
-        }
         if (!hasKylinProperties()) {
           
           return false;
@@ -1552,192 +1291,118 @@ public final class CubeVisitProtos {
       }
       private int bitField0_;
 
-      // required string behavior = 1;
-      private java.lang.Object behavior_ = "";
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public boolean hasBehavior() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public java.lang.String getBehavior() {
-        java.lang.Object ref = behavior_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          behavior_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public com.google.protobuf.ByteString
-          getBehaviorBytes() {
-        java.lang.Object ref = behavior_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          behavior_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public Builder setBehavior(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        behavior_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public Builder clearBehavior() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        behavior_ = getDefaultInstance().getBehavior();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string behavior = 1;</code>
-       */
-      public Builder setBehaviorBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        behavior_ = value;
-        onChanged();
-        return this;
-      }
-
-      // required bytes gtScanRequest = 2;
+      // required bytes gtScanRequest = 1;
       private com.google.protobuf.ByteString gtScanRequest_ = com.google.protobuf.ByteString.EMPTY;
       /**
-       * <code>required bytes gtScanRequest = 2;</code>
+       * <code>required bytes gtScanRequest = 1;</code>
        */
       public boolean hasGtScanRequest() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
+        return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * <code>required bytes gtScanRequest = 2;</code>
+       * <code>required bytes gtScanRequest = 1;</code>
        */
       public com.google.protobuf.ByteString getGtScanRequest() {
         return gtScanRequest_;
       }
       /**
-       * <code>required bytes gtScanRequest = 2;</code>
+       * <code>required bytes gtScanRequest = 1;</code>
        */
       public Builder setGtScanRequest(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000002;
+  bitField0_ |= 0x00000001;
         gtScanRequest_ = value;
         onChanged();
         return this;
       }
       /**
-       * <code>required bytes gtScanRequest = 2;</code>
+       * <code>required bytes gtScanRequest = 1;</code>
        */
       public Builder clearGtScanRequest() {
-        bitField0_ = (bitField0_ & ~0x00000002);
+        bitField0_ = (bitField0_ & ~0x00000001);
         gtScanRequest_ = getDefaultInstance().getGtScanRequest();
         onChanged();
         return this;
       }
 
-      // required bytes hbaseRawScan = 3;
+      // required bytes hbaseRawScan = 2;
       private com.google.protobuf.ByteString hbaseRawScan_ = com.google.protobuf.ByteString.EMPTY;
       /**
-       * <code>required bytes hbaseRawScan = 3;</code>
+       * <code>required bytes hbaseRawScan = 2;</code>
        */
       public boolean hasHbaseRawScan() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
+        return ((bitField0_ & 0x00000002) == 0x00000002);
       }
       /**
-       * <code>required bytes hbaseRawScan = 3;</code>
+       * <code>required bytes hbaseRawScan = 2;</code>
        */
       public com.google.protobuf.ByteString getHbaseRawScan() {
         return hbaseRawScan_;
       }
       /**
-       * <code>required bytes hbaseRawScan = 3;</code>
+       * <code>required bytes hbaseRawScan = 2;</code>
        */
       public Builder setHbaseRawScan(com.google.protobuf.ByteString value) {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000004;
+  bitField0_ |= 0x00000002;
         hbaseRawScan_ = value;
         onChanged();
         return this;
       }
       /**
-       * <code>required bytes hbaseRawScan = 3;</code>
+       * <code>required bytes hbaseRawScan = 2;</code>
        */
       public Builder clearHbaseRawScan() {
-        bitField0_ = (bitField0_ & ~0x00000004);
+        bitField0_ = (bitField0_ & ~0x00000002);
         hbaseRawScan_ = getDefaultInstance().getHbaseRawScan();
         onChanged();
         return this;
       }
 
-      // required int32 rowkeyPreambleSize = 4;
+      // required int32 rowkeyPreambleSize = 3;
       private int rowkeyPreambleSize_ ;
       /**
-       * <code>required int32 rowkeyPreambleSize = 4;</code>
+       * <code>required int32 rowkeyPreambleSize = 3;</code>
        */
       public boolean hasRowkeyPreambleSize() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
+        return ((bitField0_ & 0x00000004) == 0x00000004);
       }
       /**
-       * <code>required int32 rowkeyPreambleSize = 4;</code>
+       * <code>required int32 rowkeyPreambleSize = 3;</code>
        */
       public int getRowkeyPreambleSize() {
         return rowkeyPreambleSize_;
       }
       /**
-       * <code>required int32 rowkeyPreambleSize = 4;</code>
+       * <code>required int32 rowkeyPreambleSize = 3;</code>
        */
       public Builder setRowkeyPreambleSize(int value) {
-        bitField0_ |= 0x00000008;
+        bitField0_ |= 0x00000004;
         rowkeyPreambleSize_ = value;
         onChanged();
         return this;
       }
       /**
-       * <code>required int32 rowkeyPreambleSize = 4;</code>
+       * <code>required int32 rowkeyPreambleSize = 3;</code>
        */
       public Builder clearRowkeyPreambleSize() {
-        bitField0_ = (bitField0_ & ~0x00000008);
+        bitField0_ = (bitField0_ & ~0x00000004);
         rowkeyPreambleSize_ = 0;
         onChanged();
         return this;
       }
 
-      // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;
+      // repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;
       private java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> hbaseColumnsToGT_ =
         java.util.Collections.emptyList();
       private void ensureHbaseColumnsToGTIsMutable() {
-        if (!((bitField0_ & 0x00000010) == 0x00000010)) {
+        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
           hbaseColumnsToGT_ = new java.util.ArrayList<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList>(hbaseColumnsToGT_);
-          bitField0_ |= 0x00000010;
+          bitField0_ |= 0x00000008;
          }
       }
 
@@ -1745,7 +1410,7 @@ public final class CubeVisitProtos {
           org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> hbaseColumnsToGTBuilder_;
 
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> getHbaseColumnsToGTList() {
         if (hbaseColumnsToGTBuilder_ == null) {
@@ -1755,7 +1420,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public int getHbaseColumnsToGTCount() {
         if (hbaseColumnsToGTBuilder_ == null) {
@@ -1765,7 +1430,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList getHbaseColumnsToGT(int index) {
         if (hbaseColumnsToGTBuilder_ == null) {
@@ -1775,7 +1440,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder setHbaseColumnsToGT(
           int index, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList value) {
@@ -1792,7 +1457,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder setHbaseColumnsToGT(
           int index, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder builderForValue) {
@@ -1806,7 +1471,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder addHbaseColumnsToGT(org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList value) {
         if (hbaseColumnsToGTBuilder_ == null) {
@@ -1822,7 +1487,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder addHbaseColumnsToGT(
           int index, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList value) {
@@ -1839,7 +1504,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder addHbaseColumnsToGT(
           org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder builderForValue) {
@@ -1853,7 +1518,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder addHbaseColumnsToGT(
           int index, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder builderForValue) {
@@ -1867,7 +1532,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder addAllHbaseColumnsToGT(
           java.lang.Iterable<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList> values) {
@@ -1881,12 +1546,12 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder clearHbaseColumnsToGT() {
         if (hbaseColumnsToGTBuilder_ == null) {
           hbaseColumnsToGT_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000010);
+          bitField0_ = (bitField0_ & ~0x00000008);
           onChanged();
         } else {
           hbaseColumnsToGTBuilder_.clear();
@@ -1894,7 +1559,7 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public Builder removeHbaseColumnsToGT(int index) {
         if (hbaseColumnsToGTBuilder_ == null) {
@@ -1907,14 +1572,14 @@ public final class CubeVisitProtos {
         return this;
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder getHbaseColumnsToGTBuilder(
           int index) {
         return getHbaseColumnsToGTFieldBuilder().getBuilder(index);
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder getHbaseColumnsToGTOrBuilder(
           int index) {
@@ -1924,7 +1589,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public java.util.List<? extends org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder> 
            getHbaseColumnsToGTOrBuilderList() {
@@ -1935,14 +1600,14 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder addHbaseColumnsToGTBuilder() {
         return getHbaseColumnsToGTFieldBuilder().addBuilder(
             org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.getDefaultInstance());
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder addHbaseColumnsToGTBuilder(
           int index) {
@@ -1950,7 +1615,7 @@ public final class CubeVisitProtos {
             index, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.getDefaultInstance());
       }
       /**
-       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 5;</code>
+       * <code>repeated .CubeVisitRequest.IntList hbaseColumnsToGT = 4;</code>
        */
       public java.util.List<org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder> 
            getHbaseColumnsToGTBuilderList() {
@@ -1963,7 +1628,7 @@ public final class CubeVisitProtos {
           hbaseColumnsToGTBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
               org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList.Builder, org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntListOrBuilder>(
                   hbaseColumnsToGT_,
-                  ((bitField0_ & 0x00000010) == 0x00000010),
+                  ((bitField0_ & 0x00000008) == 0x00000008),
                   getParentForChildren(),
                   isClean());
           hbaseColumnsToGT_ = null;
@@ -1971,118 +1636,20 @@ public final class CubeVisitProtos {
         return hbaseColumnsToGTBuilder_;
       }
 
-      // required int64 startTime = 6;
-      private long startTime_ ;
-      /**
-       * <code>required int64 startTime = 6;</code>
-       *
-       * <pre>
-       *when client start the request
-       * </pre>
-       */
-      public boolean hasStartTime() {
-        return ((bitField0_ & 0x00000020) == 0x00000020);
-      }
-      /**
-       * <code>required int64 startTime = 6;</code>
-       *
-       * <pre>
-       *when client start the request
-       * </pre>
-       */
-      public long getStartTime() {
-        return startTime_;
-      }
-      /**
-       * <code>required int64 startTime = 6;</code>
-       *
-       * <pre>
-       *when client start the request
-       * </pre>
-       */
-      public Builder setStartTime(long value) {
-        bitField0_ |= 0x00000020;
-        startTime_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required int64 startTime = 6;</code>
-       *
-       * <pre>
-       *when client start the request
-       * </pre>
-       */
-      public Builder clearStartTime() {
-        bitField0_ = (bitField0_ & ~0x00000020);
-        startTime_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // required int64 timeout = 7;
-      private long timeout_ ;
-      /**
-       * <code>required int64 timeout = 7;</code>
-       *
-       * <pre>
-       *how long client will wait
-       * </pre>
-       */
-      public boolean hasTimeout() {
-        return ((bitField0_ & 0x00000040) == 0x00000040);
-      }
-      /**
-       * <code>required int64 timeout = 7;</code>
-       *
-       * <pre>
-       *how long client will wait
-       * </pre>
-       */
-      public long getTimeout() {
-        return timeout_;
-      }
-      /**
-       * <code>required int64 timeout = 7;</code>
-       *
-       * <pre>
-       *how long client will wait
-       * </pre>
-       */
-      public Builder setTimeout(long value) {
-        bitField0_ |= 0x00000040;
-        timeout_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required int64 timeout = 7;</code>
-       *
-       * <pre>
-       *how long client will wait
-       * </pre>
-       */
-      public Builder clearTimeout() {
-        bitField0_ = (bitField0_ & ~0x00000040);
-        timeout_ = 0L;
-        onChanged();
-        return this;
-      }
-
-      // required string kylinProperties = 8;
+      // required string kylinProperties = 5;
       private java.lang.Object kylinProperties_ = "";
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
        * </pre>
        */
       public boolean hasKylinProperties() {
-        return ((bitField0_ & 0x00000080) == 0x00000080);
+        return ((bitField0_ & 0x00000010) == 0x00000010);
       }
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
@@ -2100,7 +1667,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
@@ -2120,7 +1687,7 @@ public final class CubeVisitProtos {
         }
       }
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
@@ -2131,26 +1698,26 @@ public final class CubeVisitProtos {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000080;
+  bitField0_ |= 0x00000010;
         kylinProperties_ = value;
         onChanged();
         return this;
       }
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
        * </pre>
        */
       public Builder clearKylinProperties() {
-        bitField0_ = (bitField0_ & ~0x00000080);
+        bitField0_ = (bitField0_ & ~0x00000010);
         kylinProperties_ = getDefaultInstance().getKylinProperties();
         onChanged();
         return this;
       }
       /**
-       * <code>required string kylinProperties = 8;</code>
+       * <code>required string kylinProperties = 5;</code>
        *
        * <pre>
        * kylin properties
@@ -2161,7 +1728,7 @@ public final class CubeVisitProtos {
         if (value == null) {
     throw new NullPointerException();
   }
-  bitField0_ |= 0x00000080;
+  bitField0_ |= 0x00000010;
         kylinProperties_ = value;
         onChanged();
         return this;
@@ -4521,26 +4088,25 @@ public final class CubeVisitProtos {
     java.lang.String[] descriptorData = {
       "\npstorage-hbase/src/main/java/org/apache" +
       "/kylin/storage/hbase/cube/v2/coprocessor" +
-      "/endpoint/protobuf/CubeVisit.proto\"\370\001\n\020C" +
-      "ubeVisitRequest\022\020\n\010behavior\030\001 \002(\t\022\025\n\rgtS" +
-      "canRequest\030\002 \002(\014\022\024\n\014hbaseRawScan\030\003 \002(\014\022\032" +
-      "\n\022rowkeyPreambleSize\030\004 \002(\005\0223\n\020hbaseColum" +
-      "nsToGT\030\005 \003(\0132\031.CubeVisitRequest.IntList\022" +
-      "\021\n\tstartTime\030\006 \002(\003\022\017\n\007timeout\030\007 \002(\003\022\027\n\017k" +
-      "ylinProperties\030\010 \002(\t\032\027\n\007IntList\022\014\n\004ints\030" +
-      "\001 \003(\005\"\321\002\n\021CubeVisitResponse\022\026\n\016compresse",
-      "dRows\030\001 \002(\014\022\'\n\005stats\030\002 \002(\0132\030.CubeVisitRe" +
-      "sponse.Stats\032\372\001\n\005Stats\022\030\n\020serviceStartTi" +
-      "me\030\001 \001(\003\022\026\n\016serviceEndTime\030\002 \001(\003\022\027\n\017scan" +
-      "nedRowCount\030\003 \001(\003\022\032\n\022aggregatedRowCount\030" +
-      "\004 \001(\003\022\025\n\rsystemCpuLoad\030\005 \001(\001\022\036\n\026freePhys" +
-      "icalMemorySize\030\006 \001(\001\022\031\n\021freeSwapSpaceSiz" +
-      "e\030\007 \001(\001\022\020\n\010hostname\030\010 \001(\t\022\016\n\006etcMsg\030\t \001(" +
-      "\t\022\026\n\016normalComplete\030\n \001(\0052F\n\020CubeVisitSe" +
-      "rvice\0222\n\tvisitCube\022\021.CubeVisitRequest\032\022." +
-      "CubeVisitResponseB`\nEorg.apache.kylin.st",
-      "orage.hbase.cube.v2.coprocessor.endpoint" +
-      ".generatedB\017CubeVisitProtosH\001\210\001\001\240\001\001"
+      "/endpoint/protobuf/CubeVisit.proto\"\302\001\n\020C" +
+      "ubeVisitRequest\022\025\n\rgtScanRequest\030\001 \002(\014\022\024" +
+      "\n\014hbaseRawScan\030\002 \002(\014\022\032\n\022rowkeyPreambleSi" +
+      "ze\030\003 \002(\005\0223\n\020hbaseColumnsToGT\030\004 \003(\0132\031.Cub" +
+      "eVisitRequest.IntList\022\027\n\017kylinProperties" +
+      "\030\005 \002(\t\032\027\n\007IntList\022\014\n\004ints\030\001 \003(\005\"\321\002\n\021Cube" +
+      "VisitResponse\022\026\n\016compressedRows\030\001 \002(\014\022\'\n" +
+      "\005stats\030\002 \002(\0132\030.CubeVisitResponse.Stats\032\372",
+      "\001\n\005Stats\022\030\n\020serviceStartTime\030\001 \001(\003\022\026\n\016se" +
+      "rviceEndTime\030\002 \001(\003\022\027\n\017scannedRowCount\030\003 " +
+      "\001(\003\022\032\n\022aggregatedRowCount\030\004 \001(\003\022\025\n\rsyste" +
+      "mCpuLoad\030\005 \001(\001\022\036\n\026freePhysicalMemorySize" +
+      "\030\006 \001(\001\022\031\n\021freeSwapSpaceSize\030\007 \001(\001\022\020\n\010hos" +
+      "tname\030\010 \001(\t\022\016\n\006etcMsg\030\t \001(\t\022\026\n\016normalCom" +
+      "plete\030\n \001(\0052F\n\020CubeVisitService\0222\n\tvisit" +
+      "Cube\022\021.CubeVisitRequest\032\022.CubeVisitRespo" +
+      "nseB`\nEorg.apache.kylin.storage.hbase.cu" +
+      "be.v2.coprocessor.endpoint.generatedB\017Cu",
+      "beVisitProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -4552,7 +4118,7 @@ public final class CubeVisitProtos {
           internal_static_CubeVisitRequest_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_CubeVisitRequest_descriptor,
-              new java.lang.String[] { "Behavior", "GtScanRequest", "HbaseRawScan", "RowkeyPreambleSize", "HbaseColumnsToGT", "StartTime", "Timeout", "KylinProperties", });
+              new java.lang.String[] { "GtScanRequest", "HbaseRawScan", "RowkeyPreambleSize", "HbaseColumnsToGT", "KylinProperties", });
           internal_static_CubeVisitRequest_IntList_descriptor =
             internal_static_CubeVisitRequest_descriptor.getNestedTypes().get(0);
           internal_static_CubeVisitRequest_IntList_fieldAccessorTable = new

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
index e1de070..c84f4f3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
@@ -30,14 +30,11 @@ option java_generate_equals_and_hash = true;
 option optimize_for = SPEED;
 
 message CubeVisitRequest {
-    required string behavior = 1;
-    required bytes gtScanRequest = 2;
-    required bytes hbaseRawScan = 3;
-    required int32 rowkeyPreambleSize = 4;
-    repeated IntList hbaseColumnsToGT = 5;
-    required int64 startTime = 6;//when client start the request
-    required int64 timeout = 7;//how long client will wait
-    required string kylinProperties = 8; // kylin properties
+    required bytes gtScanRequest = 1;
+    required bytes hbaseRawScan = 2;
+    required int32 rowkeyPreambleSize = 3;
+    repeated IntList hbaseColumnsToGT = 4;
+    required string kylinProperties = 5; // kylin properties
     message IntList {
         repeated int32 ints = 1;
     }
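
For reference, a minimal sketch of how a caller could assemble the trimmed-down request with the regenerated classes (field numbers 1-5 as in the proto above). The payload bytes, preamble size and column indices below are placeholders for illustration, not values taken from this commit:

    import com.google.protobuf.ByteString;
    import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;

    public class CubeVisitRequestSketch {
        public static CubeVisitRequest build(byte[] gtScanRequest, byte[] hbaseRawScan, String kylinProps) {
            // behavior, startTime and timeout are gone; only the five remaining fields are populated
            return CubeVisitRequest.newBuilder()
                    .setGtScanRequest(ByteString.copyFrom(gtScanRequest))   // field 1
                    .setHbaseRawScan(ByteString.copyFrom(hbaseRawScan))     // field 2
                    .setRowkeyPreambleSize(2)                               // field 3 (placeholder size)
                    .addHbaseColumnsToGT(CubeVisitRequest.IntList.newBuilder()
                            .addInts(0).addInts(1).build())                 // field 4 (placeholder indices)
                    .setKylinProperties(kylinProps)                         // field 5
                    .build();
        }
    }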

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
index f8e2644..390930a 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
@@ -40,7 +40,7 @@ import org.apache.kylin.metadata.datatype.LongMutable;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
@@ -121,7 +121,7 @@ public class AggregateRegionObserverTest {
 
         MockupRegionScanner innerScanner = new MockupRegionScanner(cellsInput);
 
-        RegionScanner aggrScanner = new AggregationScanner(rowType, filter, projector, aggregators, innerScanner, CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM);
+        RegionScanner aggrScanner = new AggregationScanner(rowType, filter, projector, aggregators, innerScanner, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM);
         ArrayList<Cell> result = Lists.newArrayList();
         boolean hasMore = true;
         while (hasMore) {
@@ -170,7 +170,7 @@ public class AggregateRegionObserverTest {
 
         MockupRegionScanner innerScanner = new MockupRegionScanner(cellsInput);
 
-        RegionScanner aggrScanner = new AggregationScanner(rowType, filter, projector, aggregators, innerScanner, CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM);
+        RegionScanner aggrScanner = new AggregationScanner(rowType, filter, projector, aggregators, innerScanner, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM);
         ArrayList<Cell> result = Lists.newArrayList();
         boolean hasMore = true;
         while (hasMore) {


[50/50] [abbrv] kylin git commit: KYLIN-1726 Scalable streaming cubing

Posted by sh...@apache.org.
KYLIN-1726 Scalable streaming cubing


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/81c7323b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/81c7323b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/81c7323b

Branch: refs/heads/KYLIN-1726
Commit: 81c7323b633df88eedac8b319fc57f9b62b01a4a
Parents: 17569f6
Author: shaofengshi <sh...@apache.org>
Authored: Tue Aug 30 14:44:47 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/job/DeployUtil.java   |  35 ++-
 .../kylin/job/streaming/Kafka10DataLoader.java  |  80 +++++++
 .../apache/kylin/common/KylinConfigBase.java    |   1 +
 .../java/org/apache/kylin/cube/CubeSegment.java |   1 +
 .../java/org/apache/kylin/cube/ISegment.java    |  39 ----
 .../cube/gridtable/SegmentGTStartAndEnd.java    |   2 +-
 .../cube/model/CubeJoinedFlatTableDesc.java     |   6 +
 .../cube/model/CubeJoinedFlatTableEnrich.java   |   6 +
 .../apache/kylin/gridtable/ScannerWorker.java   |   2 +-
 .../metadata/model/IJoinedFlatTableDesc.java    |   2 +
 .../apache/kylin/metadata/model/ISegment.java   |  36 +++
 .../kylin/engine/mr/BatchMergeJobBuilder2.java  |   3 +
 .../org/apache/kylin/engine/mr/IMRInput.java    |  10 +
 .../java/org/apache/kylin/engine/mr/MRUtil.java |   4 +
 .../test_streaming_table_model_desc.json        |   6 +-
 .../kylin/provision/BuildCubeWithStream.java    | 227 ++++++++++++++-----
 .../org/apache/kylin/provision/MockKafka.java   | 191 ++++++++++++++++
 .../apache/kylin/provision/NetworkUtils.java    |  52 +++++
 pom.xml                                         |   2 +-
 .../apache/kylin/source/hive/HiveMRInput.java   |  11 +
 source-kafka/pom.xml                            |  13 +-
 .../kylin/source/kafka/KafkaConfigManager.java  |  46 ++--
 .../apache/kylin/source/kafka/KafkaMRInput.java | 221 ++++++++++++++++++
 .../apache/kylin/source/kafka/KafkaSource.java  |  57 +++++
 .../kylin/source/kafka/KafkaStreamingInput.java |  65 +++---
 .../kylin/source/kafka/MergeOffsetStep.java     |  89 ++++++++
 .../kylin/source/kafka/SeekOffsetStep.java      | 119 ++++++++++
 .../kylin/source/kafka/StreamingParser.java     |  49 ++--
 .../source/kafka/StringStreamingParser.java     |  49 ++--
 .../source/kafka/TimedJsonStreamParser.java     |  49 ++--
 .../apache/kylin/source/kafka/TopicMeta.java    |  49 ++--
 .../kylin/source/kafka/UpdateTimeRangeStep.java | 108 +++++++++
 .../source/kafka/config/KafkaClusterConfig.java |   3 +-
 .../source/kafka/hadoop/KafkaFlatTableJob.java  | 165 ++++++++++++++
 .../kafka/hadoop/KafkaFlatTableMapper.java      |  51 +++++
 .../source/kafka/hadoop/KafkaInputFormat.java   |  98 ++++++++
 .../kafka/hadoop/KafkaInputRecordReader.java    | 166 ++++++++++++++
 .../source/kafka/hadoop/KafkaInputSplit.java    | 102 +++++++++
 .../kylin/source/kafka/util/KafkaClient.java    | 115 ++++++++++
 .../source/kafka/util/KafkaOffsetMapping.java   |  97 ++++++++
 .../kylin/source/kafka/util/KafkaRequester.java |  56 ++---
 .../kylin/source/kafka/util/KafkaUtils.java     |   3 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |   2 +-
 .../storage/hbase/cube/v2/CubeHBaseRPC.java     |   2 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   2 +-
 45 files changed, 2140 insertions(+), 352 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
index 8c64f91..9b282e3 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -143,14 +143,12 @@ public class DeployUtil {
         deployHiveTables();
     }
 
-    public static void prepareTestDataForStreamingCube(long startTime, long endTime, String cubeName, StreamDataLoader streamDataLoader) throws IOException {
+    public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords, String cubeName, StreamDataLoader streamDataLoader) throws IOException {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
-        List<String> data = StreamingTableDataGenerator.generate(10000, startTime, endTime, cubeInstance.getFactTable());
-        List<String> data2 = StreamingTableDataGenerator.generate(10, endTime, endTime + 300000, cubeInstance.getFactTable());
+        List<String> data = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime, cubeInstance.getFactTable());
         TableDesc tableDesc = cubeInstance.getFactTableDesc();
         //load into kafka
         streamDataLoader.loadIntoKafka(data);
-        streamDataLoader.loadIntoKafka(data2);
         logger.info("Write {} messages into {}", data.size(), streamDataLoader.toString());
 
         //csv data for H2 use
@@ -165,7 +163,7 @@ public class DeployUtil {
             sb.append(StringUtils.join(rowColumns, ","));
             sb.append(System.getProperty("line.separator"));
         }
-        overrideFactTableData(sb.toString(), cubeInstance.getFactTable());
+        appendFactTableData(sb.toString(), cubeInstance.getFactTable());
     }
 
     public static void overrideFactTableData(String factTableContent, String factTableName) throws IOException {
@@ -179,6 +177,33 @@ public class DeployUtil {
         in.close();
     }
 
+    public static void appendFactTableData(String factTableContent, String factTableName) throws IOException {
+        // Write to resource store
+        ResourceStore store = ResourceStore.getStore(config());
+
+        InputStream in = new ByteArrayInputStream(factTableContent.getBytes("UTF-8"));
+        String factTablePath = "/data/" + factTableName + ".csv";
+
+        File tmpFile = File.createTempFile(factTableName, "csv");
+        FileOutputStream out = new FileOutputStream(tmpFile);
+
+        try {
+            if (store.exists(factTablePath)) {
+                InputStream oldContent = store.getResource(factTablePath).inputStream;
+                IOUtils.copy(oldContent, out);
+            }
+            IOUtils.copy(in, out);
+            IOUtils.closeQuietly(in);
+
+            store.deleteResource(factTablePath);
+            in = new FileInputStream(tmpFile);
+            store.putResource(factTablePath, in, System.currentTimeMillis());
+        } finally {
+            IOUtils.closeQuietly(out);
+            IOUtils.closeQuietly(in);
+        }
+
+    }
 
     private static void deployHiveTables() throws Exception {
 

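A brief usage sketch of the new append semantics; the CSV rows and the table name are made up for illustration, while the real rows come from StreamingTableDataGenerator as in prepareTestDataForStreamingCube above:

    import java.io.IOException;
    import org.apache.kylin.job.DeployUtil;

    public class AppendFactTableDataSketch {
        public static void main(String[] args) throws IOException {
            // hypothetical CSV rows; the real column layout is defined by the generated stream data
            String batch1 = "2013-01-01 00:00:00,site_a,12\n";
            String batch2 = "2013-01-01 00:01:00,site_b,7\n";
            DeployUtil.appendFactTableData(batch1, "DEFAULT.STREAMING_TABLE");
            // unlike overrideFactTableData, the second call keeps the first batch and appends the new rows
            DeployUtil.appendFactTableData(batch2, "DEFAULT.STREAMING_TABLE");
        }
    }
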
http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
new file mode 100644
index 0000000..a5132af
--- /dev/null
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job.streaming;
+
+import java.util.List;
+import java.util.Properties;
+
+import javax.annotation.Nullable;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kylin.source.kafka.config.BrokerConfig;
+import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+
+import org.apache.kylin.source.kafka.util.KafkaClient;
+
+/**
+ * Load prepared data into kafka(for test use)
+ */
+public class Kafka10DataLoader extends StreamDataLoader {
+    private static final Logger logger = LoggerFactory.getLogger(Kafka10DataLoader.class);
+    List<KafkaClusterConfig> kafkaClusterConfigs;
+
+    public Kafka10DataLoader(KafkaConfig kafkaConfig) {
+        super(kafkaConfig);
+        this.kafkaClusterConfigs = kafkaConfig.getKafkaClusterConfigs();
+    }
+
+    public void loadIntoKafka(List<String> messages) {
+
+        KafkaClusterConfig clusterConfig = kafkaClusterConfigs.get(0);
+        String brokerList = StringUtils.join(Collections2.transform(clusterConfig.getBrokerConfigs(), new Function<BrokerConfig, String>() {
+            @Nullable
+            @Override
+            public String apply(BrokerConfig brokerConfig) {
+                return brokerConfig.getHost() + ":" + brokerConfig.getPort();
+            }
+        }), ",");
+
+        Properties props = new Properties();
+        props.put("acks", "1");
+        props.put("retry.backoff.ms", "1000");
+        KafkaProducer producer = KafkaClient.getKafkaProducer(brokerList, props);
+
+        int boundary = messages.size() / 10;
+        for (int i = 0; i < messages.size(); ++i) {
+            ProducerRecord<String, String> keyedMessage = new ProducerRecord<String, String>(clusterConfig.getTopic(), String.valueOf(i), messages.get(i));
+            producer.send(keyedMessage);
+            if (i % boundary == 0) {
+                logger.info("sending " + i + " messages to " + this.toString());
+            }
+        }
+        logger.info("sent " + messages.size() + " messages to " + this.toString());
+        producer.close();
+    }
+
+}
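
A minimal usage sketch, mirroring how the integration test below wires the loader up; the streaming config name and the two JSON messages are placeholders:

    import java.util.Arrays;
    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.job.streaming.Kafka10DataLoader;
    import org.apache.kylin.source.kafka.KafkaConfigManager;
    import org.apache.kylin.source.kafka.config.KafkaConfig;

    public class Kafka10DataLoaderSketch {
        public static void main(String[] args) throws Exception {
            KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
            // placeholder config name; BuildCubeWithStream resolves it from the cube's fact table streaming config
            KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig("test_streaming_table");
            Kafka10DataLoader loader = new Kafka10DataLoader(kafkaConfig);
            // each string becomes one ProducerRecord on the topic configured in kafkaConfig
            loader.loadIntoKafka(Arrays.asList("{\"id\": 1}", "{\"id\": 2}"));
        }
    }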

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 79ee084..fafb1fc 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -717,6 +717,7 @@ abstract public class KylinConfigBase implements Serializable {
         Map<Integer, String> r = convertKeyToInteger(getPropertiesByPrefix("kylin.source.engine."));
         // ref constants in ISourceAware
         r.put(0, "org.apache.kylin.source.hive.HiveSource");
+        r.put(1, "org.apache.kylin.source.kafka.KafkaSource");
         return r;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
index 79397c3..afb0d28 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
@@ -37,6 +37,7 @@ import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.IBuildable;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.IRealization;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/cube/ISegment.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/ISegment.java b/core-cube/src/main/java/org/apache/kylin/cube/ISegment.java
deleted file mode 100644
index 2e1f214..0000000
--- a/core-cube/src/main/java/org/apache/kylin/cube/ISegment.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *  
- *     http://www.apache.org/licenses/LICENSE-2.0
- *  
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.cube;
-
-import org.apache.kylin.metadata.model.DataModelDesc;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-
-public interface ISegment {
-
-    public String getName();
-
-    public long getDateRangeStart();
-
-    public long getDateRangeEnd();
-
-    public long getSourceOffsetStart();
-
-    public long getSourceOffsetEnd();
-    
-    public DataModelDesc getModel();
-
-    public SegmentStatusEnum getStatus();
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/SegmentGTStartAndEnd.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/SegmentGTStartAndEnd.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/SegmentGTStartAndEnd.java
index e31111d..889a0b2 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/SegmentGTStartAndEnd.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/SegmentGTStartAndEnd.java
@@ -24,7 +24,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.cube.ISegment;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.dimension.AbstractDateDimEnc;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.metadata.datatype.DataType;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
index 6aeb617..6ca89c8 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableDesc.java
@@ -26,6 +26,7 @@ import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
@@ -162,4 +163,9 @@ public class CubeJoinedFlatTableDesc implements IJoinedFlatTableDesc {
         return cubeDesc.getDistributedByColumn();
     }
 
+    @Override
+    public ISegment getSegment() {
+        return cubeSegment;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
index 5212859..8af2297 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
@@ -25,6 +25,7 @@ import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 
@@ -137,4 +138,9 @@ public class CubeJoinedFlatTableEnrich implements IJoinedFlatTableDesc {
         return flatDesc.getDistributedBy();
     }
 
+    @Override
+    public ISegment getSegment() {
+        return flatDesc.getSegment();
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-cube/src/main/java/org/apache/kylin/gridtable/ScannerWorker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/ScannerWorker.java b/core-cube/src/main/java/org/apache/kylin/gridtable/ScannerWorker.java
index bb7503a..4213cf3 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/ScannerWorker.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/ScannerWorker.java
@@ -24,7 +24,7 @@ import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
 import java.util.Iterator;
 
-import org.apache.kylin.cube.ISegment;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-metadata/src/main/java/org/apache/kylin/metadata/model/IJoinedFlatTableDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/IJoinedFlatTableDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/IJoinedFlatTableDesc.java
index f3a4107..ffa2680 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/IJoinedFlatTableDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/IJoinedFlatTableDesc.java
@@ -37,4 +37,6 @@ public interface IJoinedFlatTableDesc {
     long getSourceOffsetEnd();
     
     TblColRef getDistributedBy();
+
+    ISegment getSegment();
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
new file mode 100644
index 0000000..f69ae3f
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISegment.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.metadata.model;
+
+public interface ISegment {
+
+    public String getName();
+
+    public long getDateRangeStart();
+
+    public long getDateRangeEnd();
+
+    public long getSourceOffsetStart();
+
+    public long getSourceOffsetEnd();
+    
+    public DataModelDesc getModel();
+
+    public SegmentStatusEnum getStatus();
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
index 129d525..badf628 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
@@ -34,10 +34,12 @@ public class BatchMergeJobBuilder2 extends JobBuilderSupport {
     private static final Logger logger = LoggerFactory.getLogger(BatchMergeJobBuilder2.class);
 
     private final IMROutput2.IMRBatchMergeOutputSide2 outputSide;
+    private final IMRInput.IMRBatchMergeInputSide inputSide;
 
     public BatchMergeJobBuilder2(CubeSegment mergeSegment, String submitter) {
         super(mergeSegment, submitter);
         this.outputSide = MRUtil.getBatchMergeOutputSide2(seg);
+        this.inputSide = MRUtil.getBatchMergeInputSide(seg);
     }
 
     public CubingJob build() {
@@ -55,6 +57,7 @@ public class BatchMergeJobBuilder2 extends JobBuilderSupport {
         }
 
         // Phase 1: Merge Dictionary
+        inputSide.addStepPhase1_MergeDictionary(result);
         result.addTask(createMergeDictionaryStep(mergingSegmentIds));
         result.addTask(createMergeStatisticsStep(cubeSegment, mergingSegmentIds, getStatisticsPath(jobId)));
         outputSide.addStepPhase1_MergeDictionary(result);

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMRInput.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMRInput.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMRInput.java
index 582052f..62cede9 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMRInput.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/IMRInput.java
@@ -21,6 +21,7 @@ package org.apache.kylin.engine.mr;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.TableDesc;
 
 /**
@@ -34,6 +35,9 @@ public interface IMRInput {
     /** Return an InputFormat that reads from specified table. */
     public IMRTableInputFormat getTableInputFormat(TableDesc table);
 
+    /** Return a helper to participate in batch cubing merge job flow. */
+    public IMRBatchMergeInputSide getBatchMergeInputSide(ISegment seg);
+
     /**
      * Utility that configures mapper to read from a table.
      */
@@ -67,4 +71,10 @@ public interface IMRInput {
         public void addStepPhase4_Cleanup(DefaultChainedExecutable jobFlow);
     }
 
+    public interface IMRBatchMergeInputSide {
+
+        /** Add step that executes before merge dictionary and before merge cube. */
+        public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow);
+
+    }
 }
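
For sources that have nothing to prepare before a merge, the new hook can be satisfied with a no-op. A minimal sketch of such an implementation (this is not the HiveMRInput change from this commit, which is not shown here):

    import org.apache.kylin.engine.mr.IMRInput;
    import org.apache.kylin.job.execution.DefaultChainedExecutable;

    public class NoOpBatchMergeInputSide implements IMRInput.IMRBatchMergeInputSide {
        @Override
        public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
            // nothing to add before dictionary merge for a plain batch source
        }
    }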

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
index 2c3b77f..67eef5e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/MRUtil.java
@@ -71,6 +71,10 @@ public class MRUtil {
         return StorageFactory.createEngineAdapter(seg, IMROutput2.class).getBatchMergeOutputSide(seg);
     }
 
+    public static IMRInput.IMRBatchMergeInputSide getBatchMergeInputSide(CubeSegment seg) {
+        return SourceFactory.createEngineAdapter(seg, IMRInput.class).getBatchMergeInputSide(seg);
+    }
+
     // use this method instead of ToolRunner.run() because ToolRunner.run() is not thread-safe
     // Refer to: http://stackoverflow.com/questions/22462665/is-hadoops-toorunner-thread-safe
     public static int runMRJob(Tool tool, String[] args) throws Exception {

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/examples/test_case_data/localmeta/model_desc/test_streaming_table_model_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/model_desc/test_streaming_table_model_desc.json b/examples/test_case_data/localmeta/model_desc/test_streaming_table_model_desc.json
index cfb889a..e6977e1 100644
--- a/examples/test_case_data/localmeta/model_desc/test_streaming_table_model_desc.json
+++ b/examples/test_case_data/localmeta/model_desc/test_streaming_table_model_desc.json
@@ -4,7 +4,7 @@
   "name": "test_streaming_table_model_desc",
   "dimensions": [
     {
-      "table": "default.streaming_table",
+      "table": "DEFAULT.STREAMING_TABLE",
       "columns": [
         "minute_start",
         "hour_start",
@@ -20,10 +20,10 @@
     "item_count"
   ],
   "last_modified": 0,
-  "fact_table": "default.streaming_table",
+  "fact_table": "DEFAULT.STREAMING_TABLE",
   "filter_condition": null,
   "partition_desc": {
-    "partition_date_column": "default.streaming_table.minute_start",
+    "partition_date_column": "DEFAULT.STREAMING_TABLE.minute_start",
     "partition_date_start": 0,
     "partition_type": "APPEND"
   }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 9490560..cfa9b45 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,59 +18,164 @@
 
 package org.apache.kylin.provision;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.UUID;
-
+import org.I0Itec.zkclient.ZkConnection;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.kafka.common.requests.MetadataResponse;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
-import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.EngineFactory;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.job.DeployUtil;
-import org.apache.kylin.job.streaming.KafkaDataLoader;
-import org.apache.kylin.metadata.realization.RealizationType;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.job.streaming.Kafka10DataLoader;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
+import org.apache.kylin.source.kafka.config.BrokerConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
-import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.TimeZone;
+import java.util.UUID;
+
 /**
  *  for streaming cubing case "test_streaming_table"
  */
 public class BuildCubeWithStream {
 
-    private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStream.class);
-    private static final String cubeName = "test_streaming_table_cube";
-    private static final long startTime = DateFormat.stringToMillis("2015-01-01 00:00:00");
-    private static final long endTime = DateFormat.stringToMillis("2015-01-03 00:00:00");
-    private static final long batchInterval = 16 * 60 * 60 * 1000;//16 hours
+    private static final Logger logger = LoggerFactory.getLogger(org.apache.kylin.provision.BuildCubeWithStream.class);
 
-    private KylinConfig kylinConfig;
+    private CubeManager cubeManager;
+    private DefaultScheduler scheduler;
+    protected ExecutableManager jobService;
+    private static final String cubeName = "test_streaming_table_cube";
 
-    public static void main(String[] args) throws Exception {
+    private KafkaConfig kafkaConfig;
+    private MockKafka kafkaServer;
 
-        try {
-            beforeClass();
+    public void before() throws Exception {
+        deployEnv();
 
-            BuildCubeWithStream buildCubeWithStream = new BuildCubeWithStream();
-            buildCubeWithStream.before();
-            buildCubeWithStream.build();
-            logger.info("Build is done");
-            buildCubeWithStream.cleanup();
-            logger.info("Going to exit");
-            System.exit(0);
-        } catch (Exception e) {
-            logger.error("error", e);
-            System.exit(1);
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        jobService = ExecutableManager.getInstance(kylinConfig);
+        scheduler = DefaultScheduler.createInstance();
+        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
         }
+        cubeManager = CubeManager.getInstance(kylinConfig);
+
+        final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+        final String factTable = cubeInstance.getFactTable();
 
+        final StreamingManager streamingManager = StreamingManager.getInstance(kylinConfig);
+        final StreamingConfig streamingConfig = streamingManager.getStreamingConfig(factTable);
+        kafkaConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(streamingConfig.getName());
+
+        String topicName = UUID.randomUUID().toString();
+        String localIp = NetworkUtils.getLocalIp();
+        BrokerConfig brokerConfig = kafkaConfig.getKafkaClusterConfigs().get(0).getBrokerConfigs().get(0);
+        brokerConfig.setHost(localIp);
+        kafkaConfig.setTopic(topicName);
+        KafkaConfigManager.getInstance(kylinConfig).saveKafkaConfig(kafkaConfig);
+
+        startEmbeddedKafka(topicName, brokerConfig);
+    }
+
+    private void startEmbeddedKafka(String topicName, BrokerConfig brokerConfig){
+        //Start mock Kafka
+        String zkConnectionStr = "sandbox:2181";
+        ZkConnection zkConnection = new ZkConnection(zkConnectionStr);
+        // Assert.assertEquals(ZooKeeper.States.CONNECTED, zkConnection.getZookeeperState());
+        kafkaServer = new MockKafka(zkConnection, brokerConfig.getPort(), brokerConfig.getId());
+        kafkaServer.start();
+
+        kafkaServer.createTopic(topicName, 3, 1);
+        kafkaServer.waitTopicUntilReady(topicName);
+
+        MetadataResponse.TopicMetadata topicMetadata = kafkaServer.fetchTopicMeta(topicName);
+        Assert.assertEquals(topicName,topicMetadata.topic());
+    }
+
+
+    private void generateStreamData(long startTime, long endTime, int numberOfRecords) throws IOException {
+        Kafka10DataLoader dataLoader = new Kafka10DataLoader(kafkaConfig);
+        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, numberOfRecords, cubeName, dataLoader);
+        logger.info("Test data inserted into Kafka");
+    }
+
+    private void clearSegment(String cubeName) throws Exception {
+        CubeInstance cube = cubeManager.getCube(cubeName);
+        // remove all existing segments
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
+        cubeManager.updateCube(cubeBuilder);
+    }
+
+    public void build() throws Exception {
+        clearSegment(cubeName);
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+        long date1 = 0;
+        long date2 = f.parse("2013-01-01").getTime();
+
+        int numberOfRecords1 = 10000;
+        generateStreamData(date1, date2, numberOfRecords1);
+        buildSegment(cubeName, 0, Long.MAX_VALUE);
+
+        long date3 = f.parse("2013-04-01").getTime();
+        int numberOfRecords2 = 5000;
+        generateStreamData(date2, date3, numberOfRecords2);
+        buildSegment(cubeName, 0, Long.MAX_VALUE);
+
+        //merge
+        mergeSegment(cubeName, 0, 15000);
+
+    }
+
+    private String mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, true);
+        DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getId();
+    }
+
+    private String refreshSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+        CubeSegment segment = cubeManager.refreshSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
+        DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getId();
+    }
+
+    private String buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
+        DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getId();
+    }
+
+    protected void deployEnv() throws IOException {
+        DeployUtil.overrideJobJarLocations();
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
     }
 
     public static void beforeClass() throws Exception {
@@ -83,44 +188,54 @@ public class BuildCubeWithStream {
         HBaseMetadataTestCase.staticCreateTestMetadata(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
     }
 
-    protected void deployEnv() throws IOException {
-        DeployUtil.overrideJobJarLocations();
+    public static void afterClass() throws Exception {
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
     }
 
-    public void before() throws Exception {
-        deployEnv();
+    public void after(){
+        kafkaServer.stop();
+    }
 
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
-        final String factTable = cubeInstance.getFactTable();
-        final StreamingConfig config = StreamingManager.getInstance(kylinConfig).getStreamingConfig(factTable);
+    protected void waitForJob(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
+                break;
+            } else {
+                try {
+                    Thread.sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
 
-        //Use a random topic for kafka data stream
-        KafkaConfig streamingConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(config.getName());
-        streamingConfig.setTopic(UUID.randomUUID().toString());
-        KafkaConfigManager.getInstance(kylinConfig).saveKafkaConfig(streamingConfig);
+    public static void main(String[] args) throws Exception {
+        try {
+            beforeClass();
 
-        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, cubeName, new KafkaDataLoader(streamingConfig));
-    }
+            BuildCubeWithStream buildCubeWithStream = new BuildCubeWithStream();
+            buildCubeWithStream.before();
+            buildCubeWithStream.build();
+            logger.info("Build is done");
+            buildCubeWithStream.after();
+            afterClass();
+            logger.info("Going to exit");
+            System.exit(0);
+        } catch (Exception e) {
+            logger.error("error", e);
+            System.exit(1);
+        }
 
-    public void cleanup() throws Exception {
-        cleanupOldStorage();
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
     }
 
     protected int cleanupOldStorage() throws Exception {
         String[] args = { "--delete", "true" };
 
-        StorageCleanupJob cli = new StorageCleanupJob();
-        cli.execute(args);
+//        KapStorageCleanupCLI cli = new KapStorageCleanupCLI();
+//        cli.execute(args);
         return 0;
     }
 
-    public void build() throws Exception {
-        logger.info("start time:" + startTime + " end time:" + endTime + " batch interval:" + batchInterval + " batch count:" + ((endTime - startTime) / batchInterval));
-        for (long start = startTime; start < endTime; start += batchInterval) {
-            logger.info(String.format("build batch:{%d, %d}", start, start + batchInterval));
-            new OneOffStreamingBuilder(RealizationType.CUBE, cubeName, start, start + batchInterval).build().run();
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
new file mode 100644
index 0000000..3f47923
--- /dev/null
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/MockKafka.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.provision;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Properties;
+import java.util.UUID;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.ZkConnection;
+import org.I0Itec.zkclient.exception.ZkMarshallingError;
+import org.I0Itec.zkclient.serialize.ZkSerializer;
+import org.apache.kafka.common.requests.MetadataResponse;
+
+import kafka.admin.AdminUtils;
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaServerStartable;
+import kafka.utils.ZkUtils;
+
+public class MockKafka {
+    private static Properties createProperties(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
+        Properties properties = new Properties();
+        properties.put("port", port);
+        properties.put("broker.id", brokerId);
+        properties.put("log.dirs", logDir);
+        properties.put("host.name", "localhost");
+        properties.put("offsets.topic.replication.factor", "1");
+        properties.put("delete.topic.enable", "true");
+        properties.put("zookeeper.connect", zkServerConnection.getServers());
+        String ip = NetworkUtils.getLocalIp();
+        properties.put("listeners", "PLAINTEXT://" + ip + ":" + port);
+        properties.put("advertised.listeners", "PLAINTEXT://" + ip + ":" + port);
+        return properties;
+    }
+
+    private KafkaServerStartable kafkaServer;
+
+    private ZkConnection zkConnection;
+
+    public MockKafka(ZkConnection zkServerConnection) {
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(), "9092", "1");
+        start();
+    }
+
+    private MockKafka(Properties properties) {
+        KafkaConfig kafkaConfig = new KafkaConfig(properties);
+        kafkaServer = new KafkaServerStartable(kafkaConfig);
+    }
+
+    public MockKafka(ZkConnection zkServerConnection, int port, int brokerId) {
+        this(zkServerConnection, System.getProperty("java.io.tmpdir") + "/" + UUID.randomUUID().toString(), String.valueOf(port), String.valueOf(brokerId));
+        start();
+    }
+
+    private MockKafka(ZkConnection zkServerConnection, String logDir, String port, String brokerId) {
+        this(createProperties(zkServerConnection, logDir, port, brokerId));
+        this.zkConnection = zkServerConnection;
+        System.out.println(String.format("Kafka %s:%s dir:%s", kafkaServer.serverConfig().brokerId(), kafkaServer.serverConfig().port(), kafkaServer.serverConfig().logDirs()));
+    }
+
+    public void createTopic(String topic, int partition, int replication) {
+        ZkClient zkClient = new ZkClient(zkConnection);
+        ZkUtils zkUtils = new ZkUtils(zkClient, zkConnection, false);
+        zkClient.setZkSerializer(new ZKStringSerializer());
+        AdminUtils.createTopic(zkUtils, topic, partition, replication, new Properties(), null);
+        zkClient.close();
+    }
+
+    public void createTopic(String topic) {
+        this.createTopic(topic, 1, 1);
+    }
+
+    public MetadataResponse.TopicMetadata fetchTopicMeta(String topic) {
+        ZkClient zkClient = new ZkClient(zkConnection);
+        ZkUtils zkUtils = new ZkUtils(zkClient, zkConnection, false);
+        zkClient.setZkSerializer(new ZKStringSerializer());
+        MetadataResponse.TopicMetadata topicMetadata = AdminUtils.fetchTopicMetadataFromZk(topic, zkUtils);
+        zkClient.close();
+        return topicMetadata;
+    }
+
+    /**
+     * Deletes the given topic. Kafka performs deletion asynchronously, so the
+     * topic may linger for a while even with delete.topic.enable=true.
+     *
+     * @param topic topic name to delete
+     */
+    public void deleteTopic(String topic) {
+        ZkClient zkClient = new ZkClient(zkConnection);
+        ZkUtils zkUtils = new ZkUtils(zkClient, zkConnection, false);
+        zkClient.setZkSerializer(new ZKStringSerializer());
+        AdminUtils.deleteTopic(zkUtils, topic);
+        zkClient.close();
+    }
+
+    public String getConnectionString() {
+        return String.format("%s:%d", kafkaServer.serverConfig().hostName(), kafkaServer.serverConfig().port());
+    }
+
+    public void start() {
+        kafkaServer.startup();
+        System.out.println("embedded kafka is up");
+    }
+
+    public void stop() {
+        kafkaServer.shutdown();
+        System.out.println("embedded kafka down");
+    }
+
+    public MetadataResponse.TopicMetadata waitTopicUntilReady(String topic) {
+        boolean isReady = false;
+        MetadataResponse.TopicMetadata topicMeta = null;
+        while (!isReady) {
+            topicMeta = this.fetchTopicMeta(topic);
+            List<MetadataResponse.PartitionMetadata> partitionsMetadata = topicMeta.partitionMetadata();
+            Iterator<MetadataResponse.PartitionMetadata> iterator = partitionsMetadata.iterator();
+            boolean hasGotLeader = true;
+            boolean hasGotReplica = true;
+            while (iterator.hasNext()) {
+                MetadataResponse.PartitionMetadata partitionMeta = iterator.next();
+                hasGotLeader &= (!partitionMeta.leader().isEmpty());
+                if (partitionMeta.leader().isEmpty()) {
+                    System.out.println("Partition leader is not ready, wait 1s.");
+                    break;
+                }
+                hasGotReplica &= (!partitionMeta.replicas().isEmpty());
+                if (partitionMeta.replicas().isEmpty()) {
+                    System.out.println("Partition replica is not ready, wait 1s.");
+                    break;
+                }
+            }
+            isReady = hasGotLeader & hasGotReplica;
+            if (!isReady) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
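+                    // interrupted while waiting for the topic to become ready; ignore and poll again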
+                }
+            }
+        }
+        return topicMeta;
+    }
+
+    public String getZookeeperConnection() {
+        return this.zkConnection.getServers();
+    }
+}
+
+class ZKStringSerializer implements ZkSerializer {
+
+    @Override
+    public byte[] serialize(Object data) throws ZkMarshallingError {
+        byte[] bytes = null;
+        try {
+            bytes = data.toString().getBytes("UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            throw new ZkMarshallingError(e);
+        }
+        return bytes;
+    }
+
+    @Override
+    public Object deserialize(byte[] bytes) throws ZkMarshallingError {
+        if (bytes == null) {
+            return null;
+        }
+        try {
+            return new String(bytes, "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            throw new ZkMarshallingError(e);
+        }
+    }
+
+}
\ No newline at end of file
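
For context, a minimal sketch of how this embedded broker is typically driven from a test. The ZooKeeper address, topic name, and the wrapper class below are illustrative only; the sketch assumes an external ZooKeeper is already reachable.

    package org.apache.kylin.provision;

    import org.I0Itec.zkclient.ZkConnection;

    public class MockKafkaUsageSketch {
        public static void main(String[] args) throws Exception {
            // assumes a ZooKeeper instance is running at this address
            ZkConnection zk = new ZkConnection("localhost:2181");
            MockKafka kafka = new MockKafka(zk);      // this constructor also starts the broker on port 9092
            kafka.createTopic("demo_topic", 3, 1);    // 3 partitions, replication factor 1
            kafka.waitTopicUntilReady("demo_topic");  // block until every partition has a leader and replicas
            System.out.println("broker listening at " + kafka.getConnectionString());
            kafka.deleteTopic("demo_topic");          // best effort; deletion is asynchronous
            kafka.stop();
        }
    }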

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/kylin-it/src/test/java/org/apache/kylin/provision/NetworkUtils.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/NetworkUtils.java b/kylin-it/src/test/java/org/apache/kylin/provision/NetworkUtils.java
new file mode 100644
index 0000000..98f6d04
--- /dev/null
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/NetworkUtils.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.provision;
+
+import java.net.Inet4Address;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+public class NetworkUtils {
+
+    public static String getLocalIp() {
+        try {
+            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
+            while (interfaces.hasMoreElements()) {
+                NetworkInterface iface = interfaces.nextElement();
+                if (iface.isLoopback() || !iface.isUp() || iface.isVirtual() || iface.isPointToPoint())
+                    continue;
+                if (iface.getName().startsWith("vboxnet"))
+                    continue;
+
+                Enumeration<InetAddress> addresses = iface.getInetAddresses();
+                while (addresses.hasMoreElements()) {
+                    InetAddress addr = addresses.nextElement();
+                    final String ip = addr.getHostAddress();
+                    if (Inet4Address.class == addr.getClass())
+                        return ip;
+                }
+            }
+        } catch (SocketException e) {
+            throw new RuntimeException(e);
+        }
+        return null;
+    }
+
+}
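
The first non-loopback IPv4 address returned here is what MockKafka puts into listeners/advertised.listeners, so that tasks running on other sandbox hosts can reach the embedded broker. A trivial, self-contained check (the wrapper class is illustrative only):

    package org.apache.kylin.provision;

    public class NetworkUtilsSketch {
        public static void main(String[] args) {
            // prints the first non-loopback, non-virtual IPv4 address, or null if none is found
            System.out.println(NetworkUtils.getLocalIp());
        }
    }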

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 38f9365..2be4231 100644
--- a/pom.xml
+++ b/pom.xml
@@ -55,7 +55,7 @@
 
         <!-- HBase versions -->
         <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
-        <kafka.version>0.8.1</kafka.version>
+        <kafka.version>0.10.0.0</kafka.version>
 
         <!-- Hadoop deps, keep compatible with hadoop2.version -->
         <zookeeper.version>3.4.6</zookeeper.version>
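
The bump from 0.8.1 to 0.10.0.0 is what lets the new code (e.g. SeekOffsetStep) use the new-style consumer client. A minimal sketch of that API follows; the broker address, group id, and topic name are assumptions for illustration:

    import java.util.Properties;

    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.PartitionInfo;

    public class NewConsumerSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // assumed broker address
            props.put("group.id", "kylin-demo");              // assumed group id
            props.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
            try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
                // partitionsFor() is the call SeekOffsetStep relies on to enumerate partitions
                for (PartitionInfo p : consumer.partitionsFor("demo_topic")) {
                    System.out.println(p.topic() + "-" + p.partition() + " leader=" + p.leader());
                }
            }
        }
    }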

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index 520d7cc..09ac522 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -50,6 +50,7 @@ import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
 import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.LookupDesc;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.slf4j.Logger;
@@ -69,6 +70,16 @@ public class HiveMRInput implements IMRInput {
         return new HiveTableInputFormat(table.getIdentity());
     }
 
+    @Override
+    public IMRBatchMergeInputSide getBatchMergeInputSide(ISegment seg) {
+        return new IMRBatchMergeInputSide() {
+            @Override
+            public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
+                // doing nothing
+            }
+        };
+    }
+
     public static class HiveTableInputFormat implements IMRTableInputFormat {
         final String dbName;
         final String tableName;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/pom.xml
----------------------------------------------------------------------
diff --git a/source-kafka/pom.xml b/source-kafka/pom.xml
index 9393216..7f2a2e9 100644
--- a/source-kafka/pom.xml
+++ b/source-kafka/pom.xml
@@ -32,10 +32,11 @@
 
     </parent>
 
-    <properties>
-    </properties>
-
     <dependencies>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-engine-mr</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>org.apache.kylin</groupId>
@@ -60,16 +61,10 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hive.hcatalog</groupId>
-            <artifactId>hive-hcatalog-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>
         </dependency>
     </dependencies>
 
-
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
index d594873..cfdf316 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaConfigManager.java
@@ -1,35 +1,19 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 
 package org.apache.kylin.source.kafka;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
new file mode 100644
index 0000000..cfce137
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+import org.apache.kylin.metadata.model.ISegment;
+import org.apache.kylin.source.kafka.hadoop.KafkaFlatTableJob;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.StreamingMessage;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
+import org.apache.kylin.engine.mr.IMRInput;
+import org.apache.kylin.engine.mr.JobBuilderSupport;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.engine.mr.common.MapReduceExecutable;
+import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
+import org.apache.kylin.job.JoinedFlatTable;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.List;
+
+public class KafkaMRInput implements IMRInput {
+
+    CubeSegment cubeSegment;
+
+    @Override
+    public IMRBatchCubingInputSide getBatchCubingInputSide(IJoinedFlatTableDesc flatDesc) {
+        this.cubeSegment = (CubeSegment)flatDesc.getSegment();
+        return new BatchCubingInputSide(cubeSegment);
+    }
+
+    @Override
+    public IMRTableInputFormat getTableInputFormat(TableDesc table) {
+        KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv());
+        KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(table.getIdentity());
+        List<TblColRef> columns = Lists.transform(Arrays.asList(table.getColumns()), new Function<ColumnDesc, TblColRef>() {
+            @Nullable
+            @Override
+            public TblColRef apply(ColumnDesc input) {
+                return input.getRef();
+            }
+        });
+
+        return new KafkaTableInputFormat(cubeSegment, columns, kafkaConfig, null);
+    }
+
+    @Override
+    public IMRBatchMergeInputSide getBatchMergeInputSide(ISegment seg) {
+        return new KafkaMRBatchMergeInputSide((CubeSegment) seg);
+    }
+
+    public static class KafkaTableInputFormat implements IMRTableInputFormat {
+        private final CubeSegment cubeSegment;
+        private List<TblColRef> columns;
+        private StreamingParser streamingParser;
+        private KafkaConfig kafkaConfig;
+        private final JobEngineConfig conf;
+
+        public KafkaTableInputFormat(CubeSegment cubeSegment, List<TblColRef> columns, KafkaConfig kafkaConfig, JobEngineConfig conf) {
+            this.cubeSegment = cubeSegment;
+            this.columns = columns;
+            this.kafkaConfig = kafkaConfig;
+            this.conf = conf;
+        }
+
+        @Override
+        public void configureJob(Job job) {
+            job.setInputFormatClass(SequenceFileInputFormat.class);
+            job.setMapOutputValueClass(Text.class);
+            String jobId = job.getConfiguration().get(BatchConstants.ARG_CUBING_JOB_ID);
+            IJoinedFlatTableDesc flatHiveTableDesc = new CubeJoinedFlatTableDesc(cubeSegment);
+            String inputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc, JobBuilderSupport.getJobWorkingDir(conf, jobId));
+            try {
+                FileInputFormat.addInputPath(job, new Path(inputPath));
+            } catch (IOException e) {
+                throw new IllegalStateException(e);
+            }
+        }
+
+        @Override
+        public String[] parseMapperInput(Object mapperInput) {
+            if (streamingParser == null) {
+                try {
+                    streamingParser = StreamingParser.getStreamingParser(kafkaConfig.getParserName(), kafkaConfig.getParserProperties(), columns);
+                } catch (ReflectiveOperationException e) {
+                    throw new IllegalArgumentException(e);
+                }
+            }
+            Text text = (Text) mapperInput;
+            ByteBuffer buffer = ByteBuffer.wrap(text.getBytes(), 0, text.getLength()).slice();
+            StreamingMessage streamingMessage = streamingParser.parse(buffer);
+            return streamingMessage.getData().toArray(new String[streamingMessage.getData().size()]);
+        }
+
+    }
+
+    public static class BatchCubingInputSide implements IMRBatchCubingInputSide {
+
+        final JobEngineConfig conf;
+        final CubeSegment seg;
+        private String outputPath;
+
+        public BatchCubingInputSide(CubeSegment seg) {
+            this.conf = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
+            this.seg = seg;
+        }
+
+        @Override
+        public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
+            jobFlow.addTask(createUpdateSegmentOffsetStep(jobFlow.getId()));
+            jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId()));
+        }
+
+        public SeekOffsetStep createUpdateSegmentOffsetStep(String jobId) {
+            final SeekOffsetStep result = new SeekOffsetStep();
+            result.setName("Seek and update offset step");
+
+            CubingExecutableUtil.setCubeName(seg.getRealization().getName(), result.getParams());
+            CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
+            CubingExecutableUtil.setCubingJobId(jobId, result.getParams());
+
+            return result;
+        }
+
+        private MapReduceExecutable createSaveKafkaDataStep(String jobId) {
+            MapReduceExecutable result = new MapReduceExecutable();
+
+            IJoinedFlatTableDesc flatHiveTableDesc = new CubeJoinedFlatTableDesc(seg);
+            outputPath = JoinedFlatTable.getTableDir(flatHiveTableDesc, JobBuilderSupport.getJobWorkingDir(conf, jobId));
+            result.setName("Save data from Kafka");
+            result.setMapReduceJobClass(KafkaFlatTableJob.class);
+            JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(seg, "system");
+            StringBuilder cmd = new StringBuilder();
+            jobBuilderSupport.appendMapReduceParameters(cmd);
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_NAME, seg.getName());
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Save_Kafka_Data_" + seg.getRealization().getName() + "_Step");
+
+            result.setMapReduceParams(cmd.toString());
+            return result;
+        }
+
+        @Override
+        public void addStepPhase4_Cleanup(DefaultChainedExecutable jobFlow) {
+            final UpdateTimeRangeStep result = new UpdateTimeRangeStep();
+            result.setName("Update Segment Time Range");
+            CubingExecutableUtil.setCubeName(seg.getRealization().getName(), result.getParams());
+            CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
+            CubingExecutableUtil.setCubingJobId(jobFlow.getId(), result.getParams());
+            JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(seg, "SYSTEM");
+            result.getParams().put(BatchConstants.CFG_OUTPUT_PATH, jobBuilderSupport.getFactDistinctColumnsPath(jobFlow.getId()));
+            jobFlow.addTask(result);
+
+        }
+
+        @Override
+        public IMRTableInputFormat getFlatTableInputFormat() {
+            KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv());
+            KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(seg.getRealization().getFactTable());
+            List<TblColRef> columns = new CubeJoinedFlatTableDesc(seg).getAllColumns();
+
+            return new KafkaTableInputFormat(seg, columns, kafkaConfig, conf);
+
+        }
+
+    }
+
+    class KafkaMRBatchMergeInputSide implements IMRBatchMergeInputSide {
+
+        private CubeSegment cubeSegment;
+
+        KafkaMRBatchMergeInputSide(CubeSegment cubeSegment) {
+            this.cubeSegment = cubeSegment;
+        }
+
+        @Override
+        public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
+
+            final MergeOffsetStep result = new MergeOffsetStep();
+            result.setName("Merge offset step");
+
+            CubingExecutableUtil.setCubeName(cubeSegment.getRealization().getName(), result.getParams());
+            CubingExecutableUtil.setSegmentId(cubeSegment.getUuid(), result.getParams());
+            CubingExecutableUtil.setCubingJobId(jobFlow.getId(), result.getParams());
+            jobFlow.addTask(result);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
new file mode 100644
index 0000000..d039583
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source.kafka;
+
+import com.google.common.collect.Lists;
+import org.apache.kylin.engine.mr.IMRInput;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.source.ISource;
+import org.apache.kylin.source.ReadableTable;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+
+import java.util.List;
+
+//used by reflection
+public class KafkaSource implements ISource {
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public <I> I adaptToBuildEngine(Class<I> engineInterface) {
+        if (engineInterface == IMRInput.class) {
+            return (I) new KafkaMRInput();
+        } else {
+            throw new RuntimeException("Cannot adapt to " + engineInterface);
+        }
+    }
+
+    @Override
+    public ReadableTable createReadableTable(TableDesc tableDesc) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public List<String> getMRDependentResources(TableDesc table) {
+        List<String> dependentResources = Lists.newArrayList();
+        dependentResources.add(KafkaConfig.concatResourcePath(table.getIdentity()));
+        dependentResources.add(StreamingConfig.concatResourcePath(table.getIdentity()));
+        return dependentResources;
+    }
+
+}
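
For reference, a minimal sketch of how this source is expected to be adapted to the MR build engine. Direct construction is shown only for illustration; as the comment above notes, the class is normally instantiated via reflection based on the table's source type.

    package org.apache.kylin.source.kafka;

    import org.apache.kylin.engine.mr.IMRInput;
    import org.apache.kylin.source.ISource;

    public class KafkaSourceAdaptSketch {
        public static void main(String[] args) {
            ISource source = new KafkaSource();
            // adaptToBuildEngine() hands back the MR-specific input implementation
            IMRInput mrInput = source.adaptToBuildEngine(IMRInput.class);
            System.out.println("adapted to " + mrInput.getClass().getName()); // KafkaMRInput
        }
    }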

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
index c3bdb75..de42689 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
@@ -1,36 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka;
 
 import java.util.List;
@@ -40,6 +24,9 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
+import com.google.common.base.Function;
+import kafka.cluster.BrokerEndPoint;
+import org.apache.kafka.common.protocol.SecurityProtocol;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.StreamingBatch;
@@ -66,6 +53,8 @@ import kafka.javaapi.FetchResponse;
 import kafka.javaapi.PartitionMetadata;
 import kafka.message.MessageAndOffset;
 
+import javax.annotation.Nullable;
+
 @SuppressWarnings("unused")
 public class KafkaStreamingInput implements IStreamingInput {
 
@@ -151,8 +140,16 @@ public class KafkaStreamingInput implements IStreamingInput {
                 if (partitionMetadata.errorCode() != 0) {
                     logger.warn("PartitionMetadata errorCode: " + partitionMetadata.errorCode());
                 }
-                replicaBrokers = partitionMetadata.replicas();
-                return partitionMetadata.leader();
+                replicaBrokers = Lists.transform(partitionMetadata.replicas(), new Function<BrokerEndPoint, Broker>() {
+                    @Nullable
+                    @Override
+                    public Broker apply(@Nullable BrokerEndPoint brokerEndPoint) {
+                        return new Broker(brokerEndPoint, SecurityProtocol.PLAINTEXT);
+                    }
+                });
+                BrokerEndPoint leaderEndpoint = partitionMetadata.leader();
+
+                return new Broker(leaderEndpoint, SecurityProtocol.PLAINTEXT);
             } else {
                 return null;
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/MergeOffsetStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/MergeOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/MergeOffsetStep.java
new file mode 100644
index 0000000..a21b980
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/MergeOffsetStep.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Maps;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
+import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.ExecutableContext;
+import org.apache.kylin.job.execution.ExecuteResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.kylin.source.kafka.util.KafkaOffsetMapping;
+
+/**
+ */
+public class MergeOffsetStep extends AbstractExecutable {
+
+    private static final Logger logger = LoggerFactory.getLogger(MergeOffsetStep.class);
+    public MergeOffsetStep() {
+        super();
+    }
+
+    @Override
+    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+        final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
+        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
+        final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
+
+        List<CubeSegment> mergingSegs = cube.getMergingSegments(segment);
+        Map<Integer, Long> mergedStartOffsets = Maps.newHashMap();
+        Map<Integer, Long> mergedEndOffsets = Maps.newHashMap();
+
+        long dateRangeStart = Long.MAX_VALUE, dateRangeEnd = 0;
+        for (CubeSegment seg: mergingSegs) {
+            Map<Integer, Long> startOffsets = KafkaOffsetMapping.parseOffsetStart(seg);
+            Map<Integer, Long> endOffsets = KafkaOffsetMapping.parseOffsetEnd(seg);
+
+            for (Integer partition : startOffsets.keySet()) {
+                long currentStart = mergedStartOffsets.get(partition) != null ? mergedStartOffsets.get(partition) : Long.MAX_VALUE;
+                long currentEnd = mergedEndOffsets.get(partition) != null ? mergedEndOffsets.get(partition) : 0;
+                mergedStartOffsets.put(partition, Math.min(currentStart, startOffsets.get(partition)));
+                mergedEndOffsets.put(partition, Math.max(currentEnd, endOffsets.get(partition)));
+            }
+            dateRangeStart = Math.min(dateRangeStart, seg.getDateRangeStart());
+            dateRangeEnd = Math.max(dateRangeEnd, seg.getDateRangeEnd());
+        }
+
+        KafkaOffsetMapping.saveOffsetStart(segment, mergedStartOffsets);
+        KafkaOffsetMapping.saveOffsetEnd(segment, mergedEndOffsets);
+        segment.setDateRangeStart(dateRangeStart);
+        segment.setDateRangeEnd(dateRangeEnd);
+
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToUpdateSegs(segment);
+        try {
+            cubeManager.updateCube(cubeBuilder);
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
+        } catch (IOException e) {
+            logger.error("fail to update cube segment offset", e);
+            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+        }
+    }
+
+}
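
The merge rule above is simply, per partition, the minimum of the merging segments' start offsets and the maximum of their end offsets. A self-contained illustration with hypothetical offsets (not Kylin API, just the arithmetic):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class OffsetMergeSketch {

        // per partition: min of starts (takeMin = true) or max of ends (takeMin = false)
        static Map<Integer, Long> merge(List<Map<Integer, Long>> perSegment, boolean takeMin) {
            Map<Integer, Long> merged = new HashMap<>();
            for (Map<Integer, Long> offsets : perSegment) {
                for (Map.Entry<Integer, Long> e : offsets.entrySet()) {
                    Long current = merged.get(e.getKey());
                    long candidate = e.getValue();
                    if (current == null) {
                        merged.put(e.getKey(), candidate);
                    } else {
                        merged.put(e.getKey(), takeMin ? Math.min(current, candidate) : Math.max(current, candidate));
                    }
                }
            }
            return merged;
        }

        public static void main(String[] args) {
            // hypothetical segments: partition 0 covers [100, 200) then [200, 350)
            Map<Integer, Long> startA = new HashMap<>(); startA.put(0, 100L);
            Map<Integer, Long> endA   = new HashMap<>(); endA.put(0, 200L);
            Map<Integer, Long> startB = new HashMap<>(); startB.put(0, 200L);
            Map<Integer, Long> endB   = new HashMap<>(); endB.put(0, 350L);

            System.out.println("merged start = " + merge(Arrays.asList(startA, startB), true));  // {0=100}
            System.out.println("merged end   = " + merge(Arrays.asList(endA, endB), false));     // {0=350}
        }
    }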

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
new file mode 100644
index 0000000..5dca93f
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka;
+
+import org.apache.kylin.source.kafka.util.KafkaClient;
+import org.apache.kylin.source.kafka.util.KafkaOffsetMapping;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.PartitionInfo;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
+import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.ExecutableContext;
+import org.apache.kylin.job.execution.ExecuteResult;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ */
+public class SeekOffsetStep extends AbstractExecutable {
+
+    private static final Logger logger = LoggerFactory.getLogger(SeekOffsetStep.class);
+
+    public SeekOffsetStep() {
+        super();
+    }
+
+    @Override
+    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+        final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
+        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
+        final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
+
+        Map<Integer, Long> startOffsets = KafkaOffsetMapping.parseOffsetStart(segment);
+        Map<Integer, Long> endOffsets = KafkaOffsetMapping.parseOffsetEnd(segment);
+
+        if (startOffsets.size() > 0 && endOffsets.size() > 0 && startOffsets.size() == endOffsets.size()) {
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "skipped, as the offset is provided.");
+        }
+
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(context.getConfig()).getKafkaConfig(cube.getFactTable());
+        final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
+        final String topic = kafkaConfig.getTopic();
+        try (final KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, cube.getName(), null)) {
+            final List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
+
+            if (startOffsets.isEmpty()) {
+                // user didn't specify start offset, use the biggest offset in existing segments as start
+                for (CubeSegment seg : cube.getSegments()) {
+                    Map<Integer, Long> segEndOffset = KafkaOffsetMapping.parseOffsetEnd(seg);
+                    for (PartitionInfo partition : partitionInfos) {
+                        int partitionId = partition.partition();
+                        if (segEndOffset.containsKey(partitionId)) {
+                            startOffsets.put(partitionId, Math.max(startOffsets.containsKey(partitionId) ? startOffsets.get(partitionId) : 0, segEndOffset.get(partitionId)));
+                        }
+                    }
+                }
+
+                if (partitionInfos.size() > startOffsets.size()) {
+                    // has new partition added
+                    for (int x = startOffsets.size(); x < partitionInfos.size(); x++) {
+                        long earliest = KafkaClient.getEarliestOffset(consumer, topic, partitionInfos.get(x).partition());
+                        startOffsets.put(partitionInfos.get(x).partition(), earliest);
+                    }
+                }
+
+                logger.info("Get start offset for segment " + segment.getName() + ": " + startOffsets.toString());
+            }
+
+            if (endOffsets.isEmpty()) {
+                // user didn't specify end offset, use latest offset in kafka
+                for (PartitionInfo partitionInfo : partitionInfos) {
+                    long latest = KafkaClient.getLatestOffset(consumer, topic, partitionInfo.partition());
+                    endOffsets.put(partitionInfo.partition(), latest);
+                }
+
+                logger.info("Get end offset for segment " + segment.getName() + ": " + endOffsets.toString());
+            }
+        }
+
+        KafkaOffsetMapping.saveOffsetStart(segment, startOffsets);
+        KafkaOffsetMapping.saveOffsetEnd(segment, endOffsets);
+
+        segment.setName(CubeSegment.makeSegmentName(0, 0, segment.getSourceOffsetStart(), segment.getSourceOffsetEnd()));
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToUpdateSegs(segment);
+        try {
+            cubeManager.updateCube(cubeBuilder);
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
+        } catch (IOException e) {
+            logger.error("fail to update cube segment offset", e);
+            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+        }
+    }
+
+}
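
When no offsets are supplied, the start offset per partition defaults to the largest end offset among segments already built, and the end offset defaults to the broker's latest offset. A small worked example of the start-offset part (all values hypothetical, plain Java rather than Kylin API):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class SeekOffsetSketch {
        public static void main(String[] args) {
            // hypothetical end offsets of two existing segments for partition 0
            Map<Integer, Long> seg1End = new HashMap<>(); seg1End.put(0, 200L);
            Map<Integer, Long> seg2End = new HashMap<>(); seg2End.put(0, 350L);

            // start offset of the new segment = largest end offset already built, per partition
            Map<Integer, Long> start = new HashMap<>();
            for (Map<Integer, Long> segEnd : Arrays.asList(seg1End, seg2End)) {
                for (Map.Entry<Integer, Long> e : segEnd.entrySet()) {
                    long current = start.containsKey(e.getKey()) ? start.get(e.getKey()) : 0L;
                    start.put(e.getKey(), Math.max(current, e.getValue()));
                }
            }
            System.out.println("new segment starts at " + start); // {0=350}
            // the matching end offset would come from the broker's latest offset per partition
        }
    }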

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
index cb6a72b..6b7d658 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
@@ -1,37 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka;
 
 import java.lang.reflect.Constructor;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StringStreamingParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StringStreamingParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StringStreamingParser.java
index 8888d67..666297f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StringStreamingParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StringStreamingParser.java
@@ -1,37 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka;
 
 import java.nio.ByteBuffer;


[15/50] [abbrv] kylin git commit: Revert "remove unnecessary raw measure"

Posted by sh...@apache.org.
Revert "remove unnecessary raw measure"


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d7a3fdf5
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d7a3fdf5
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d7a3fdf5

Branch: refs/heads/KYLIN-1726
Commit: d7a3fdf57acb6fbabf94669aaca869e47d89aa13
Parents: a05f111
Author: Hongbin Ma <ma...@apache.org>
Authored: Sat Sep 10 14:40:25 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Sat Sep 10 14:42:46 2016 +0800

----------------------------------------------------------------------
 .../kylin/measure/topn/TopNMeasureType.java     |   2 +
 .../test_case_data/localmeta/cube_desc/ssb.json | 409 ++++++-------
 .../test_kylin_cube_with_slr_desc.json          | 389 +++++-------
 ...st_kylin_cube_with_view_inner_join_desc.json | 388 +++++-------
 ...est_kylin_cube_with_view_left_join_desc.json | 388 +++++-------
 .../test_kylin_cube_without_slr_desc.json       |  61 +-
 ...t_kylin_cube_without_slr_left_join_desc.json | 584 +++++++++----------
 .../test_streaming_table_cube_desc.json         | 245 ++++----
 8 files changed, 1048 insertions(+), 1418 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 0756056..01eb90c 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -274,9 +274,11 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
 
         if (sum.isSum() == false)
             return false;
+        
         if (sum.getParameter() == null || sum.getParameter().getColRefs() == null || sum.getParameter().getColRefs().size() == 0)
             return false;
 
+
         TblColRef sumCol = sum.getParameter().getColRefs().get(0);
         return sumCol.equals(topnNumCol);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/ssb.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/ssb.json b/examples/test_case_data/localmeta/cube_desc/ssb.json
index 4903979..d3ea10b 100644
--- a/examples/test_case_data/localmeta/cube_desc/ssb.json
+++ b/examples/test_case_data/localmeta/cube_desc/ssb.json
@@ -1,256 +1,179 @@
 {
-  "uuid": "5c44df30-daec-486e-af90-927bf7851057",
-  "name": "ssb",
-  "description": "",
-  "dimensions": [
-    {
-      "name": "SSB.PART_DERIVED",
-      "table": "SSB.PART",
-      "column": null,
-      "derived": [
-        "P_MFGR",
-        "P_CATEGORY",
-        "P_BRAND"
-      ]
-    },
-    {
-      "name": "C_CITY",
-      "table": "SSB.CUSTOMER",
-      "column": "C_CITY",
-      "derived": null
-    },
-    {
-      "name": "C_REGION",
-      "table": "SSB.CUSTOMER",
-      "column": "C_REGION",
-      "derived": null
-    },
-    {
-      "name": "C_NATION",
-      "table": "SSB.CUSTOMER",
-      "column": "C_NATION",
-      "derived": null
-    },
-    {
-      "name": "S_CITY",
-      "table": "SSB.SUPPLIER",
-      "column": "S_CITY",
-      "derived": null
-    },
-    {
-      "name": "S_REGION",
-      "table": "SSB.SUPPLIER",
-      "column": "S_REGION",
-      "derived": null
-    },
-    {
-      "name": "S_NATION",
-      "table": "SSB.SUPPLIER",
-      "column": "S_NATION",
-      "derived": null
-    },
-    {
-      "name": "D_YEAR",
-      "table": "SSB.DATES",
-      "column": "D_YEAR",
-      "derived": null
-    },
-    {
-      "name": "D_YEARMONTH",
-      "table": "SSB.DATES",
-      "column": "D_YEARMONTH",
-      "derived": null
-    },
-    {
-      "name": "D_YEARMONTHNUM",
-      "table": "SSB.DATES",
-      "column": "D_YEARMONTHNUM",
-      "derived": null
-    },
-    {
-      "name": "D_WEEKNUMINYEAR",
-      "table": "SSB.DATES",
-      "column": "D_WEEKNUMINYEAR",
-      "derived": null
-    }
-  ],
-  "measures": [
-    {
-      "name": "_COUNT_",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+  "uuid" : "5c44df30-daec-486e-af90-927bf7851057",
+  "name" : "ssb",
+  "description" : "",
+  "dimensions" : [ {
+    "name" : "SSB.PART_DERIVED",
+    "table" : "SSB.PART",
+    "column" : null,
+    "derived" : [ "P_MFGR", "P_CATEGORY", "P_BRAND" ]
+  }, {
+    "name" : "C_CITY",
+    "table" : "SSB.CUSTOMER",
+    "column" : "C_CITY",
+    "derived" : null
+  }, {
+    "name" : "C_REGION",
+    "table" : "SSB.CUSTOMER",
+    "column" : "C_REGION",
+    "derived" : null
+  }, {
+    "name" : "C_NATION",
+    "table" : "SSB.CUSTOMER",
+    "column" : "C_NATION",
+    "derived" : null
+  }, {
+    "name" : "S_CITY",
+    "table" : "SSB.SUPPLIER",
+    "column" : "S_CITY",
+    "derived" : null
+  }, {
+    "name" : "S_REGION",
+    "table" : "SSB.SUPPLIER",
+    "column" : "S_REGION",
+    "derived" : null
+  }, {
+    "name" : "S_NATION",
+    "table" : "SSB.SUPPLIER",
+    "column" : "S_NATION",
+    "derived" : null
+  }, {
+    "name" : "D_YEAR",
+    "table" : "SSB.DATES",
+    "column" : "D_YEAR",
+    "derived" : null
+  }, {
+    "name" : "D_YEARMONTH",
+    "table" : "SSB.DATES",
+    "column" : "D_YEARMONTH",
+    "derived" : null
+  }, {
+    "name" : "D_YEARMONTHNUM",
+    "table" : "SSB.DATES",
+    "column" : "D_YEARMONTHNUM",
+    "derived" : null
+  }, {
+    "name" : "D_WEEKNUMINYEAR",
+    "table" : "SSB.DATES",
+    "column" : "D_WEEKNUMINYEAR",
+    "derived" : null
+  } ],
+  "measures" : [ {
+    "name" : "_COUNT_",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "TOTAL_REVENUE",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "LO_REVENUE",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TOTAL_REVENUE",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "LO_REVENUE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "TOTAL_SUPPLYCOST",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "LO_SUPPLYCOST",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TOTAL_SUPPLYCOST",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "LO_SUPPLYCOST",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "TOTAL_V_REVENUE",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "V_REVENUE",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TOTAL_V_REVENUE",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "V_REVENUE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
-    }
-  ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "LO_PARTKEY",
-        "encoding": "dict"
-      },
-      {
-        "column": "C_CITY",
-        "encoding": "dict"
-      },
-      {
-        "column": "C_REGION",
-        "encoding": "dict"
-      },
-      {
-        "column": "C_NATION",
-        "encoding": "dict"
-      },
-      {
-        "column": "S_CITY",
-        "encoding": "dict"
-      },
-      {
-        "column": "S_REGION",
-        "encoding": "dict"
-      },
-      {
-        "column": "S_NATION",
-        "encoding": "dict"
-      },
-      {
-        "column": "D_YEAR",
-        "encoding": "dict"
-      },
-      {
-        "column": "D_YEARMONTH",
-        "encoding": "dict"
-      },
-      {
-        "column": "D_YEARMONTHNUM",
-        "encoding": "dict"
-      },
-      {
-        "column": "D_WEEKNUMINYEAR",
-        "encoding": "dict"
-      }
-    ]
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "LO_PARTKEY",
+      "encoding" : "dict"
+    }, {
+      "column" : "C_CITY",
+      "encoding" : "dict"
+    }, {
+      "column" : "C_REGION",
+      "encoding" : "dict"
+    }, {
+      "column" : "C_NATION",
+      "encoding" : "dict"
+    }, {
+      "column" : "S_CITY",
+      "encoding" : "dict"
+    }, {
+      "column" : "S_REGION",
+      "encoding" : "dict"
+    }, {
+      "column" : "S_NATION",
+      "encoding" : "dict"
+    }, {
+      "column" : "D_YEAR",
+      "encoding" : "dict"
+    }, {
+      "column" : "D_YEARMONTH",
+      "encoding" : "dict"
+    }, {
+      "column" : "D_YEARMONTHNUM",
+      "encoding" : "dict"
+    }, {
+      "column" : "D_WEEKNUMINYEAR",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": "5iV8LVYs+PmVUju8QNQ5TQ==",
-  "last_modified": 1457503036686,
-  "model_name": "ssb",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "F1",
-        "columns": [
-          {
-            "qualifier": "M",
-            "measure_refs": [
-              "_COUNT_",
-              "TOTAL_REVENUE",
-              "TOTAL_SUPPLYCOST",
-              "TOTAL_V_REVENUE"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : "5iV8LVYs+PmVUju8QNQ5TQ==",
+  "last_modified" : 1457503036686,
+  "model_name" : "ssb",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "F1",
+      "columns" : [ {
+        "qualifier" : "M",
+        "measure_refs" : [ "_COUNT_", "TOTAL_REVENUE", "TOTAL_SUPPLYCOST", "TOTAL_V_REVENUE" ]
+      } ]
+    } ]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "LO_PARTKEY",
-        "C_CITY",
-        "C_REGION",
-        "C_NATION",
-        "S_CITY",
-        "S_REGION",
-        "S_NATION",
-        "D_YEAR",
-        "D_YEARMONTH",
-        "D_YEARMONTHNUM",
-        "D_WEEKNUMINYEAR"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "C_REGION",
-            "C_NATION",
-            "C_CITY"
-          ],
-          [
-            "S_REGION",
-            "S_NATION",
-            "S_CITY"
-          ],
-          [
-            "D_YEARMONTH",
-            "D_YEARMONTHNUM",
-            "D_WEEKNUMINYEAR"
-          ]
-        ],
-        "mandatory_dims": [
-          "D_YEAR"
-        ],
-        "joint_dims": []
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "LO_PARTKEY", "C_CITY", "C_REGION", "C_NATION", "S_CITY", "S_REGION", "S_NATION", "D_YEAR", "D_YEARMONTH", "D_YEARMONTHNUM", "D_WEEKNUMINYEAR" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "C_REGION", "C_NATION", "C_CITY" ], [ "S_REGION", "S_NATION", "S_CITY" ], [ "D_YEARMONTH", "D_YEARMONTHNUM", "D_WEEKNUMINYEAR" ] ],
+      "mandatory_dims" : [ "D_YEAR" ],
+      "joint_dims" : [ ]
     }
-  ],
-  "notify_list": [],
-  "status_need_notify": [],
-  "partition_date_start": 694224000000,
-  "partition_date_end": 3153600000000,
-  "auto_merge_time_ranges": [
-    604800000,
-    2419200000
-  ],
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
-  "override_kylin_properties": {
-    "kylin.hbase.default.compression.codec": "lz4",
-    "kylin.cube.aggrgroup.isMandatoryOnlyValid": "true"
+  } ],
+  "notify_list" : [ ],
+  "status_need_notify" : [ ],
+  "partition_date_start" : 694224000000,
+  "partition_date_end" : 3153600000000,
+  "auto_merge_time_ranges" : [ 604800000, 2419200000 ],
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
+  "override_kylin_properties" : {
+    "kylin.hbase.default.compression.codec" : "lz4",
+    "kylin.cube.aggrgroup.isMandatoryOnlyValid" : "true"
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
index f62d196..4064fcb 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
@@ -1,245 +1,172 @@
 {
-  "uuid": "a24ca905-1fc6-4f67-985c-38fa5aeafd92",
-  "name": "test_kylin_cube_with_slr_desc",
-  "description": null,
-  "dimensions": [
-    {
-      "name": "CAL_DT",
-      "table": "EDW.TEST_CAL_DT",
-      "column": "{FK}",
-      "derived": [
-        "WEEK_BEG_DT"
-      ]
-    },
-    {
-      "name": "CATEGORY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "{FK}",
-      "derived": [
-        "USER_DEFINED_FIELD1",
-        "USER_DEFINED_FIELD3",
-        "UPD_DATE",
-        "UPD_USER"
-      ]
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "META_CATEG_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL2_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL3_NAME",
-      "derived": null
-    },
-    {
-      "name": "LSTG_FORMAT_NAME",
-      "table": "DEFAULT.TEST_KYLIN_FACT",
-      "column": "LSTG_FORMAT_NAME",
-      "derived": null
-    },
-    {
-      "name": "SITE_ID",
-      "table": "EDW.TEST_SITES",
-      "column": "{FK}",
-      "derived": [
-        "SITE_NAME",
-        "CRE_USER"
-      ]
-    },
-    {
-      "name": "SELLER_TYPE_CD",
-      "table": "EDW.TEST_SELLER_TYPE_DIM",
-      "column": "{FK}",
-      "derived": [
-        "SELLER_TYPE_DESC"
-      ]
-    },
-    {
-      "name": "SELLER_ID",
-      "table": "DEFAULT.TEST_KYLIN_FACT",
-      "column": "SELLER_ID",
-      "derived": null
-    }
-  ],
-  "measures": [
-    {
-      "name": "GMV_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+  "uuid" : "a24ca905-1fc6-4f67-985c-38fa5aeafd92",
+ 
+  "name" : "test_kylin_cube_with_slr_desc",
+  "description" : null,
+  "dimensions" : [ {
+    "name" : "CAL_DT",
+    "table" : "EDW.TEST_CAL_DT",
+    "column" : "{FK}",
+    "derived" : [ "WEEK_BEG_DT" ]
+  }, {
+    "name" : "CATEGORY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "{FK}",
+    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "META_CATEG_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL2_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL3_NAME",
+    "derived" : null
+  }, {
+    "name" : "LSTG_FORMAT_NAME",
+    "table" : "DEFAULT.TEST_KYLIN_FACT",
+    "column" : "LSTG_FORMAT_NAME",
+    "derived" : null
+  }, {
+    "name" : "SITE_ID",
+    "table" : "EDW.TEST_SITES",
+    "column" : "{FK}",
+    "derived" : [ "SITE_NAME", "CRE_USER" ]
+  }, {
+    "name" : "SELLER_TYPE_CD",
+    "table" : "EDW.TEST_SELLER_TYPE_DIM",
+    "column" : "{FK}",
+    "derived" : [ "SELLER_TYPE_DESC" ]
+  }, {
+    "name" : "SELLER_ID",
+    "table" : "DEFAULT.TEST_KYLIN_FACT",
+    "column" : "SELLER_ID",
+    "derived" : null
+  } ],
+  "measures" : [ {
+    "name" : "GMV_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MIN",
-      "function": {
-        "expression": "MIN",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MIN",
+    "function" : {
+      "expression" : "MIN",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MAX",
-      "function": {
-        "expression": "MAX",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MAX",
+    "function" : {
+      "expression" : "MAX",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "TRANS_CNT",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TRANS_CNT",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "ITEM_COUNT_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "ITEM_COUNT",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "ITEM_COUNT_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "ITEM_COUNT",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
-    }
-  ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "seller_id",
-        "encoding": "int:4",
-        "isShardBy": true
-      },
-      {
-        "column": "cal_dt",
-        "encoding": "dict"
-      },
-      {
-        "column": "leaf_categ_id",
-        "encoding": "fixed_length:18"
-      },
-      {
-        "column": "meta_categ_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl2_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl3_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_format_name",
-        "encoding": "fixed_length:12"
-      },
-      {
-        "column": "lstg_site_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "slr_segment_cd",
-        "encoding": "dict"
-      }
-    ]
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "seller_id",
+      "encoding" : "int:4",
+      "isShardBy" : true
+    }, {
+      "column" : "cal_dt",
+      "encoding" : "dict"
+    }, {
+      "column" : "leaf_categ_id",
+      "encoding" : "fixed_length:18"
+    }, {
+      "column" : "meta_categ_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl2_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl3_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_format_name",
+      "encoding" : "fixed_length:12"
+    }, {
+      "column" : "lstg_site_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "slr_segment_cd",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": null,
-  "last_modified": 1448959801271,
-  "model_name": "test_kylin_inner_join_model_desc",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "f1",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "gmv_sum",
-              "gmv_min",
-              "gmv_max",
-              "trans_cnt",
-              "item_count_sum"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : null,
+  "last_modified" : 1448959801271,
+  "model_name" : "test_kylin_inner_join_model_desc",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "f1",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
+      } ]
+    } ]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "lstg_format_name",
-        "lstg_site_id",
-        "meta_categ_name",
-        "seller_id",
-        "slr_segment_cd"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "META_CATEG_NAME",
-            "CATEG_LVL2_NAME",
-            "CATEG_LVL3_NAME"
-          ]
-        ],
-        "mandatory_dims": [
-          "seller_id"
-        ],
-        "joint_dims": [
-          [
-            "lstg_format_name",
-            "lstg_site_id",
-            "slr_segment_cd"
-          ]
-        ]
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name", "seller_id", "slr_segment_cd" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
+      "mandatory_dims" : ["seller_id"],
+      "joint_dims" : [ [ "lstg_format_name", "lstg_site_id", "slr_segment_cd" ] ]
     }
-  ],
-  "notify_list": null,
-  "status_need_notify": [],
-  "auto_merge_time_ranges": null,
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
-  "partition_date_start": 0
+  } ],
+  "notify_list" : null,
+  "status_need_notify" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
+  "partition_date_start" : 0
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
index e3a3e70..d4c64b5 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
@@ -1,249 +1,169 @@
 {
-  "uuid": "9876b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name": "test_kylin_cube_with_view_inner_join_desc",
-  "description": null,
-  "dimensions": [
-    {
-      "name": "CAL_DT",
-      "table": "EDW.V_TEST_CAL_DT",
-      "column": "{FK}",
-      "derived": [
-        "WEEK_BEG_DT"
-      ]
-    },
-    {
-      "name": "CATEGORY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "{FK}",
-      "derived": [
-        "USER_DEFINED_FIELD1",
-        "USER_DEFINED_FIELD3",
-        "UPD_DATE",
-        "UPD_USER"
-      ]
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "META_CATEG_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL2_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL3_NAME",
-      "derived": null
-    },
-    {
-      "name": "LSTG_FORMAT_NAME",
-      "table": "DEFAULT.TEST_KYLIN_FACT",
-      "column": "LSTG_FORMAT_NAME",
-      "derived": null
-    },
-    {
-      "name": "SITE_ID",
-      "table": "EDW.TEST_SITES",
-      "column": "{FK}",
-      "derived": [
-        "SITE_NAME",
-        "CRE_USER"
-      ]
-    },
-    {
-      "name": "SELLER_TYPE_CD",
-      "table": "EDW.TEST_SELLER_TYPE_DIM",
-      "column": "{FK}",
-      "derived": [
-        "SELLER_TYPE_DESC"
-      ]
-    }
-  ],
-  "measures": [
-    {
-      "name": "GMV_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+  "uuid" : "9876b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name" : "test_kylin_cube_with_view_inner_join_desc",
+  "description" : null,
+  "dimensions" : [ {
+    "name" : "CAL_DT",
+    "table" : "EDW.V_TEST_CAL_DT",
+    "column" : "{FK}",
+    "derived" : [ "WEEK_BEG_DT" ]
+  }, {
+    "name" : "CATEGORY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "{FK}",
+    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "META_CATEG_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL2_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL3_NAME",
+    "derived" : null
+  }, {
+    "name" : "LSTG_FORMAT_NAME",
+    "table" : "DEFAULT.TEST_KYLIN_FACT",
+    "column" : "LSTG_FORMAT_NAME",
+    "derived" : null
+  }, {
+    "name" : "SITE_ID",
+    "table" : "EDW.TEST_SITES",
+    "column" : "{FK}",
+    "derived" : [ "SITE_NAME", "CRE_USER" ]
+  }, {
+    "name" : "SELLER_TYPE_CD",
+    "table" : "EDW.TEST_SELLER_TYPE_DIM",
+    "column" : "{FK}",
+    "derived" : [ "SELLER_TYPE_DESC" ]
+  } ],
+  "measures" : [ {
+    "name" : "GMV_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MIN",
-      "function": {
-        "expression": "MIN",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MIN",
+    "function" : {
+      "expression" : "MIN",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MAX",
-      "function": {
-        "expression": "MAX",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MAX",
+    "function" : {
+      "expression" : "MAX",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "TRANS_CNT",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TRANS_CNT",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "ITEM_COUNT_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "ITEM_COUNT",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
-      },
-      "dependent_measure_ref": null
-    }
-  ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "cal_dt",
-        "encoding": "dict"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "ITEM_COUNT_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "ITEM_COUNT",
+        "next_parameter" : null
       },
-      {
-        "column": "leaf_categ_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "meta_categ_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl2_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl3_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_format_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_site_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "slr_segment_cd",
-        "encoding": "dict"
-      }
-    ]
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  }],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "cal_dt",
+      "encoding" : "dict"
+    }, {
+      "column" : "leaf_categ_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "meta_categ_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl2_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl3_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_format_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_site_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "slr_segment_cd",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": null,
-  "last_modified": 1448959801311,
-  "model_name": "test_kylin_inner_join_view_model_desc",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "f1",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "gmv_sum",
-              "gmv_min",
-              "gmv_max",
-              "trans_cnt",
-              "item_count_sum"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : null,
+  "last_modified" : 1448959801311,
+  "model_name" : "test_kylin_inner_join_view_model_desc",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "f1",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
+      } ]
+    }]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "lstg_format_name",
-        "lstg_site_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": [
-          [
-            "categ_lvl2_name",
-            "categ_lvl3_name",
-            "leaf_categ_id",
-            "meta_categ_name"
-          ]
-        ]
-      }
-    },
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "META_CATEG_NAME",
-            "CATEG_LVL2_NAME",
-            "CATEG_LVL3_NAME"
-          ]
-        ],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": []
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
+    }
+  }, {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ ]
     }
-  ],
-  "notify_list": null,
-  "status_need_notify": [],
-  "auto_merge_time_ranges": null,
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
+  } ],
+  "notify_list" : null,
+  "status_need_notify" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
   "partition_date_start": 0
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
index b17fbff..0388c0e 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
@@ -1,249 +1,169 @@
 {
-  "uuid": "6789b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name": "test_kylin_cube_with_view_left_join_desc",
-  "description": null,
-  "dimensions": [
-    {
-      "name": "CAL_DT",
-      "table": "EDW.V_TEST_CAL_DT",
-      "column": "{FK}",
-      "derived": [
-        "WEEK_BEG_DT"
-      ]
-    },
-    {
-      "name": "CATEGORY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "{FK}",
-      "derived": [
-        "USER_DEFINED_FIELD1",
-        "USER_DEFINED_FIELD3",
-        "UPD_DATE",
-        "UPD_USER"
-      ]
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "META_CATEG_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL2_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL3_NAME",
-      "derived": null
-    },
-    {
-      "name": "LSTG_FORMAT_NAME",
-      "table": "DEFAULT.TEST_KYLIN_FACT",
-      "column": "LSTG_FORMAT_NAME",
-      "derived": null
-    },
-    {
-      "name": "SITE_ID",
-      "table": "EDW.TEST_SITES",
-      "column": "{FK}",
-      "derived": [
-        "SITE_NAME",
-        "CRE_USER"
-      ]
-    },
-    {
-      "name": "SELLER_TYPE_CD",
-      "table": "EDW.TEST_SELLER_TYPE_DIM",
-      "column": "{FK}",
-      "derived": [
-        "SELLER_TYPE_DESC"
-      ]
-    }
-  ],
-  "measures": [
-    {
-      "name": "GMV_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+  "uuid" : "6789b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name" : "test_kylin_cube_with_view_left_join_desc",
+  "description" : null,
+  "dimensions" : [ {
+    "name" : "CAL_DT",
+    "table" : "EDW.V_TEST_CAL_DT",
+    "column" : "{FK}",
+    "derived" : [ "WEEK_BEG_DT" ]
+  }, {
+    "name" : "CATEGORY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "{FK}",
+    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "META_CATEG_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL2_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL3_NAME",
+    "derived" : null
+  }, {
+    "name" : "LSTG_FORMAT_NAME",
+    "table" : "DEFAULT.TEST_KYLIN_FACT",
+    "column" : "LSTG_FORMAT_NAME",
+    "derived" : null
+  }, {
+    "name" : "SITE_ID",
+    "table" : "EDW.TEST_SITES",
+    "column" : "{FK}",
+    "derived" : [ "SITE_NAME", "CRE_USER" ]
+  }, {
+    "name" : "SELLER_TYPE_CD",
+    "table" : "EDW.TEST_SELLER_TYPE_DIM",
+    "column" : "{FK}",
+    "derived" : [ "SELLER_TYPE_DESC" ]
+  } ],
+  "measures" : [ {
+    "name" : "GMV_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MIN",
-      "function": {
-        "expression": "MIN",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MIN",
+    "function" : {
+      "expression" : "MIN",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "GMV_MAX",
-      "function": {
-        "expression": "MAX",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MAX",
+    "function" : {
+      "expression" : "MAX",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "TRANS_CNT",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TRANS_CNT",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "ITEM_COUNT_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "ITEM_COUNT",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
-      },
-      "dependent_measure_ref": null
-    }
-  ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "cal_dt",
-        "encoding": "dict"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "ITEM_COUNT_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "ITEM_COUNT",
+        "next_parameter" : null
       },
-      {
-        "column": "leaf_categ_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "meta_categ_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl2_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl3_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_format_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_site_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "slr_segment_cd",
-        "encoding": "dict"
-      }
-    ]
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  }],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "cal_dt",
+      "encoding" : "dict"
+    }, {
+      "column" : "leaf_categ_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "meta_categ_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl2_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl3_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_format_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_site_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "slr_segment_cd",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": null,
-  "last_modified": 1448959801311,
-  "model_name": "test_kylin_left_join_view_model_desc",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "f1",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "gmv_sum",
-              "gmv_min",
-              "gmv_max",
-              "trans_cnt",
-              "item_count_sum"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : null,
+  "last_modified" : 1448959801311,
+  "model_name" : "test_kylin_left_join_view_model_desc",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "f1",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
+      } ]
+    }]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "lstg_format_name",
-        "lstg_site_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": [
-          [
-            "categ_lvl2_name",
-            "categ_lvl3_name",
-            "leaf_categ_id",
-            "meta_categ_name"
-          ]
-        ]
-      }
-    },
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "META_CATEG_NAME",
-            "CATEG_LVL2_NAME",
-            "CATEG_LVL3_NAME"
-          ]
-        ],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": []
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
+    }
+  }, {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ ]
     }
-  ],
-  "notify_list": null,
-  "status_need_notify": [],
-  "auto_merge_time_ranges": null,
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
+  } ],
+  "notify_list" : null,
+  "status_need_notify" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
   "partition_date_start": 0
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
index d185175..28328e4 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
@@ -1,4 +1,5 @@
 {
+ 
   "uuid": "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
   "name": "test_kylin_cube_without_slr_desc",
   "description": null,
@@ -159,19 +160,54 @@
         "returntype": "extendedcolumn(100)"
       },
       "dependent_measure_ref": null
-    },
-    {
-      "name": "PRICE_RAW",
-      "function": {
-        "expression": "RAW",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
+    }, {
+      "name" : "CAL_DT_RAW",
+      "function" : {
+        "expression" : "RAW",
+        "parameter" : {
+          "type" : "column",
+          "value" : "CAL_DT",
+          "next_parameter" : null
         },
-        "returntype": "raw"
+        "returntype" : "raw"
       },
-      "dependent_measure_ref": null
+      "dependent_measure_ref" : null
+    }, {
+      "name" : "LSTG_FORMAT_NAME_RAW",
+      "function" : {
+        "expression" : "RAW",
+        "parameter" : {
+          "type" : "column",
+          "value" : "LSTG_FORMAT_NAME",
+          "next_parameter" : null
+        },
+        "returntype" : "raw"
+      },
+      "dependent_measure_ref" : null
+    }, {
+      "name" : "LEAF_CATEG_ID_RAW",
+      "function" : {
+        "expression" : "RAW",
+        "parameter" : {
+          "type" : "column",
+          "value" : "LEAF_CATEG_ID",
+          "next_parameter" : null
+        },
+        "returntype" : "raw"
+      },
+      "dependent_measure_ref" : null
+    }, {
+      "name" : "PRICE_RAW",
+      "function" : {
+        "expression" : "RAW",
+        "parameter" : {
+          "type" : "column",
+          "value" : "PRICE",
+          "next_parameter" : null
+        },
+        "returntype" : "raw"
+      },
+      "dependent_measure_ref" : null
     }
   ],
   "rowkey": {
@@ -234,6 +270,9 @@
               "item_count_sum",
               "SITE_EXTENDED_1",
               "SITE_EXTENDED_2",
+              "CAL_DT_RAW",
+              "LSTG_FORMAT_NAME_RAW",
+              "LEAF_CATEG_ID_RAW",
               "PRICE_RAW"
             ]
           }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
index 2aea1a8..ca1b35c 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
@@ -1,357 +1,293 @@
 {
-  "uuid": "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name": "test_kylin_cube_without_slr_left_join_desc",
-  "description": null,
-  "dimensions": [
-    {
-      "name": "CAL_DT",
-      "table": "EDW.TEST_CAL_DT",
-      "column": "{FK}",
-      "derived": [
-        "WEEK_BEG_DT"
-      ]
-    },
-    {
-      "name": "CATEGORY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "{FK}",
-      "derived": [
-        "USER_DEFINED_FIELD1",
-        "USER_DEFINED_FIELD3",
-        "UPD_DATE",
-        "UPD_USER"
-      ]
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "META_CATEG_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL2_NAME",
-      "derived": null
-    },
-    {
-      "name": "CATEGORY_HIERARCHY",
-      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
-      "column": "CATEG_LVL3_NAME",
-      "derived": null
+  "uuid" : "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name" : "test_kylin_cube_without_slr_left_join_desc",
+  "description" : null,
+  "dimensions" : [ {
+    "name" : "CAL_DT",
+    "table" : "EDW.TEST_CAL_DT",
+    "column" : "{FK}",
+    "derived" : [ "WEEK_BEG_DT" ]
+  }, {
+    "name" : "CATEGORY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "{FK}",
+    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "META_CATEG_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL2_NAME",
+    "derived" : null
+  }, {
+    "name" : "CATEGORY_HIERARCHY",
+    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
+    "column" : "CATEG_LVL3_NAME",
+    "derived" : null
+  }, {
+    "name" : "LSTG_FORMAT_NAME",
+    "table" : "DEFAULT.TEST_KYLIN_FACT",
+    "column" : "LSTG_FORMAT_NAME",
+    "derived" : null
+  }, {
+    "name" : "SITE_ID",
+    "table" : "EDW.TEST_SITES",
+    "column" : "{FK}",
+    "derived" : [ "SITE_NAME", "CRE_USER" ]
+  }, {
+    "name" : "SELLER_TYPE_CD",
+    "table" : "EDW.TEST_SELLER_TYPE_DIM",
+    "column" : "{FK}",
+    "derived" : [ "SELLER_TYPE_DESC" ]
+  } ],
+  "measures" : [ {
+    "name" : "GMV_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
+      },
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "LSTG_FORMAT_NAME",
-      "table": "DEFAULT.TEST_KYLIN_FACT",
-      "column": "LSTG_FORMAT_NAME",
-      "derived": null
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MIN",
+    "function" : {
+      "expression" : "MIN",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
+      },
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "SITE_ID",
-      "table": "EDW.TEST_SITES",
-      "column": "{FK}",
-      "derived": [
-        "SITE_NAME",
-        "CRE_USER"
-      ]
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_MAX",
+    "function" : {
+      "expression" : "MAX",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
+      },
+      "returntype" : "decimal(19,4)"
     },
-    {
-      "name": "SELLER_TYPE_CD",
-      "table": "EDW.TEST_SELLER_TYPE_DIM",
-      "column": "{FK}",
-      "derived": [
-        "SELLER_TYPE_DESC"
-      ]
-    }
-  ],
-  "measures": [
-    {
-      "name": "GMV_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TRANS_CNT",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "GMV_MIN",
-      "function": {
-        "expression": "MIN",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "ITEM_COUNT_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "ITEM_COUNT",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "GMV_MAX",
-      "function": {
-        "expression": "MAX",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,4)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "SELLER_CNT_BITMAP",
+    "function" : {
+      "expression" : "COUNT_DISTINCT",
+      "parameter" : {
+        "type" : "column",
+        "value" : "SELLER_ID",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bitmap"
     },
-    {
-      "name": "TRANS_CNT",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "SITE_NAME_BITMAP",
+    "function" : {
+      "expression" : "COUNT_DISTINCT",
+      "parameter" : {
+        "type" : "column",
+        "value" : "SITE_NAME",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bitmap"
     },
-    {
-      "name": "ITEM_COUNT_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "ITEM_COUNT",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "SELLER_FORMAT_CNT",
+    "function" : {
+      "expression" : "COUNT_DISTINCT",
+      "parameter" : {
+        "type" : "column",
+        "value" : "LSTG_FORMAT_NAME",
+        "next_parameter" : {
+          "type" : "column",
+          "value" : "SELLER_ID",
+          "next_parameter" : null
+        }
       },
-      "dependent_measure_ref": null
+      "returntype" : "hllc(10)"
     },
-    {
-      "name": "SELLER_CNT_BITMAP",
-      "function": {
-        "expression": "COUNT_DISTINCT",
-        "parameter": {
-          "type": "column",
-          "value": "SELLER_ID",
-          "next_parameter": null
-        },
-        "returntype": "bitmap"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "TOP_SELLER",
+    "function" : {
+      "expression" : "TOP_N",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : {
+          "type" : "column",
+          "value" : "SELLER_ID",
+          "next_parameter" : null
+        }
       },
-      "dependent_measure_ref": null
+      "returntype" : "topn(100)",
+      "configuration": {"topn.encoding.SELLER_ID" : "int:4"}
     },
-    {
-      "name": "SITE_NAME_BITMAP",
-      "function": {
-        "expression": "COUNT_DISTINCT",
-        "parameter": {
-          "type": "column",
-          "value": "SITE_NAME",
-          "next_parameter": null
-        },
-        "returntype": "bitmap"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "CAL_DT_RAW",
+    "function" : {
+      "expression" : "RAW",
+      "parameter" : {
+        "type" : "column",
+        "value" : "CAL_DT",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "raw"
     },
-    {
-      "name": "SELLER_FORMAT_CNT",
-      "function": {
-        "expression": "COUNT_DISTINCT",
-        "parameter": {
-          "type": "column",
-          "value": "LSTG_FORMAT_NAME",
-          "next_parameter": {
-            "type": "column",
-            "value": "SELLER_ID",
-            "next_parameter": null
-          }
-        },
-        "returntype": "hllc(10)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "LSTG_FORMAT_NAME_RAW",
+    "function" : {
+      "expression" : "RAW",
+      "parameter" : {
+        "type" : "column",
+        "value" : "LSTG_FORMAT_NAME",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "raw"
     },
-    {
-      "name": "TOP_SELLER",
-      "function": {
-        "expression": "TOP_N",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": {
-            "type": "column",
-            "value": "SELLER_ID",
-            "next_parameter": null
-          }
-        },
-        "returntype": "topn(100)",
-        "configuration": {
-          "topn.encoding.SELLER_ID": "int:4"
-        }
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "LEAF_CATEG_ID_RAW",
+    "function" : {
+      "expression" : "RAW",
+      "parameter" : {
+        "type" : "column",
+        "value" : "LEAF_CATEG_ID",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "raw"
     },
-    {
-      "name": "PRICE_RAW",
-      "function": {
-        "expression": "RAW",
-        "parameter": {
-          "type": "column",
-          "value": "PRICE",
-          "next_parameter": null
-        },
-        "returntype": "raw"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "PRICE_RAW",
+    "function" : {
+      "expression" : "RAW",
+      "parameter" : {
+        "type" : "column",
+        "value" : "PRICE",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
-    }
-  ],
-  "dictionaries": [
+      "returntype" : "raw"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "dictionaries" : [
     {
-      "column": "SITE_NAME",
+      "column" : "SITE_NAME",
       "builder": "org.apache.kylin.dict.GlobalDictionaryBuilder"
     }
   ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "cal_dt",
-        "encoding": "dict"
-      },
-      {
-        "column": "leaf_categ_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "meta_categ_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl2_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "categ_lvl3_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_format_name",
-        "encoding": "dict"
-      },
-      {
-        "column": "lstg_site_id",
-        "encoding": "dict"
-      },
-      {
-        "column": "slr_segment_cd",
-        "encoding": "dict"
-      }
-    ]
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "cal_dt",
+      "encoding" : "dict"
+    }, {
+      "column" : "leaf_categ_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "meta_categ_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl2_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "categ_lvl3_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_format_name",
+      "encoding" : "dict"
+    }, {
+      "column" : "lstg_site_id",
+      "encoding" : "dict"
+    }, {
+      "column" : "slr_segment_cd",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": null,
-  "last_modified": 1448959801311,
-  "model_name": "test_kylin_left_join_model_desc",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "f1",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "gmv_sum",
-              "gmv_min",
-              "gmv_max",
-              "trans_cnt",
-              "item_count_sum",
-              "PRICE_RAW"
-            ]
-          }
-        ]
-      },
-      {
-        "name": "f2",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "seller_cnt_bitmap",
-              "site_name_bitmap",
-              "seller_format_cnt"
-            ]
-          }
-        ]
-      },
-      {
-        "name": "f3",
-        "columns": [
-          {
-            "qualifier": "m",
-            "measure_refs": [
-              "top_seller"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : null,
+  "last_modified" : 1448959801311,
+  "model_name" : "test_kylin_left_join_model_desc",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "f1",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum", "CAL_DT_RAW", "LSTG_FORMAT_NAME_RAW", "LEAF_CATEG_ID_RAW", "PRICE_RAW" ]
+      } ]
+    }, {
+      "name" : "f2",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "seller_cnt_bitmap", "site_name_bitmap", "seller_format_cnt"]
+      } ]
+    }, {
+      "name" : "f3",
+      "columns" : [ {
+        "qualifier" : "m",
+        "measure_refs" : [ "top_seller" ]
+      } ]
+    } ]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "lstg_format_name",
-        "lstg_site_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": [
-          [
-            "categ_lvl2_name",
-            "categ_lvl3_name",
-            "leaf_categ_id",
-            "meta_categ_name"
-          ]
-        ]
-      }
-    },
-    {
-      "includes": [
-        "cal_dt",
-        "categ_lvl2_name",
-        "categ_lvl3_name",
-        "leaf_categ_id",
-        "meta_categ_name"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "META_CATEG_NAME",
-            "CATEG_LVL2_NAME",
-            "CATEG_LVL3_NAME"
-          ]
-        ],
-        "mandatory_dims": [
-          "cal_dt"
-        ],
-        "joint_dims": []
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
     }
-  ],
-  "notify_list": null,
-  "status_need_notify": [],
-  "auto_merge_time_ranges": null,
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
+  }, {
+    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
+      "mandatory_dims" : [ "cal_dt" ],
+      "joint_dims" : [ ]
+    }
+  } ],
+  "notify_list" : null,
+  "status_need_notify" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
   "override_kylin_properties": {
     "kylin.job.cubing.inmem.sampling.hll.precision": "16"
   },

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7a3fdf5/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
index f2c4e72..ef10c1e 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
@@ -1,155 +1,118 @@
 {
-  "uuid": "901ed15e-7769-4c66-b7ae-fbdc971cd192",
-  "name": "test_streaming_table_cube_desc",
-  "description": "",
-  "dimensions": [
-    {
-      "name": "DEFAULT.STREAMING_TABLE.SITE",
-      "table": "DEFAULT.STREAMING_TABLE",
-      "column": "SITE",
-      "derived": null
-    },
-    {
-      "name": "DEFAULT.STREAMING_TABLE.ITM",
-      "table": "DEFAULT.STREAMING_TABLE",
-      "column": "ITM",
-      "derived": null
-    },
-    {
-      "name": "TIME",
-      "table": "DEFAULT.STREAMING_TABLE",
-      "column": "DAY_START",
-      "derived": null
-    },
-    {
-      "name": "TIME",
-      "table": "DEFAULT.STREAMING_TABLE",
-      "column": "HOUR_START",
-      "derived": null
-    },
-    {
-      "name": "TIME",
-      "table": "DEFAULT.STREAMING_TABLE",
-      "column": "MINUTE_START",
-      "derived": null
-    }
-  ],
-  "measures": [
-    {
-      "name": "_COUNT_",
-      "function": {
-        "expression": "COUNT",
-        "parameter": {
-          "type": "constant",
-          "value": "1",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+  "uuid" : "901ed15e-7769-4c66-b7ae-fbdc971cd192",
+ 
+  "name" : "test_streaming_table_cube_desc",
+  "description" : "",
+  "dimensions" : [ {
+    "name" : "DEFAULT.STREAMING_TABLE.SITE",
+    "table" : "DEFAULT.STREAMING_TABLE",
+    "column" : "SITE",
+    "derived" : null
+  }, {
+    "name" : "DEFAULT.STREAMING_TABLE.ITM",
+    "table" : "DEFAULT.STREAMING_TABLE",
+    "column" : "ITM",
+    "derived" : null
+  }, {
+    "name" : "TIME",
+    "table" : "DEFAULT.STREAMING_TABLE",
+    "column" : "DAY_START",
+    "derived" : null
+  }, {
+    "name" : "TIME",
+    "table" : "DEFAULT.STREAMING_TABLE",
+    "column" : "HOUR_START",
+    "derived" : null
+  }, {
+    "name" : "TIME",
+    "table" : "DEFAULT.STREAMING_TABLE",
+    "column" : "MINUTE_START",
+    "derived" : null
+  } ],
+  "measures" : [ {
+    "name" : "_COUNT_",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "bigint"
     },
-    {
-      "name": "GMV_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "GMV",
-          "next_parameter": null
-        },
-        "returntype": "decimal(19,6)"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "GMV_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "GMV",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
+      "returntype" : "decimal(19,6)"
     },
-    {
-      "name": "ITEM_COUNT_SUM",
-      "function": {
-        "expression": "SUM",
-        "parameter": {
-          "type": "column",
-          "value": "ITEM_COUNT",
-          "next_parameter": null
-        },
-        "returntype": "bigint"
+    "dependent_measure_ref" : null
+  }, {
+    "name" : "ITEM_COUNT_SUM",
+    "function" : {
+      "expression" : "SUM",
+      "parameter" : {
+        "type" : "column",
+        "value" : "ITEM_COUNT",
+        "next_parameter" : null
       },
-      "dependent_measure_ref": null
-    }
-  ],
-  "rowkey": {
-    "rowkey_columns": [
-      {
-        "column": "DAY_START",
-        "encoding": "dict"
-      },
-      {
-        "column": "HOUR_START",
-        "encoding": "dict"
-      },
-      {
-        "column": "MINUTE_START",
-        "encoding": "dict"
-      },
-      {
-        "column": "SITE",
-        "encoding": "dict"
-      },
-      {
-        "column": "ITM",
-        "encoding": "dict"
-      }
-    ]
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "DAY_START",
+      "encoding" : "dict"
+    }, {
+      "column" : "HOUR_START",
+      "encoding" : "dict"
+    }, {
+      "column" : "MINUTE_START",
+      "encoding" : "dict"
+    }, {
+      "column" : "SITE",
+      "encoding" : "dict"
+    }, {
+      "column" : "ITM",
+      "encoding" : "dict"
+    } ]
   },
-  "signature": null,
-  "last_modified": 1448959801314,
-  "model_name": "test_streaming_table_model_desc",
-  "null_string": null,
-  "hbase_mapping": {
-    "column_family": [
-      {
-        "name": "F1",
-        "columns": [
-          {
-            "qualifier": "M",
-            "measure_refs": [
-              "_COUNT_",
-              "GMV_SUM",
-              "ITEM_COUNT_SUM"
-            ]
-          }
-        ]
-      }
-    ]
+  "signature" : null,
+  "last_modified" : 1448959801314,
+  "model_name" : "test_streaming_table_model_desc",
+  "null_string" : null,
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "F1",
+      "columns" : [ {
+        "qualifier" : "M",
+        "measure_refs" : [ "_COUNT_", "GMV_SUM", "ITEM_COUNT_SUM" ]
+      } ]
+    } ]
   },
-  "aggregation_groups": [
-    {
-      "includes": [
-        "DAY_START",
-        "HOUR_START",
-        "ITM",
-        "MINUTE_START",
-        "SITE"
-      ],
-      "select_rule": {
-        "hierarchy_dims": [
-          [
-            "DAY_START",
-            "HOUR_START",
-            "MINUTE_START"
-          ]
-        ],
-        "mandatory_dims": [],
-        "joint_dims": []
-      }
+  "aggregation_groups" : [ {
+    "includes" : [ "DAY_START", "HOUR_START", "ITM", "MINUTE_START", "SITE" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ [ "DAY_START", "HOUR_START", "MINUTE_START" ] ],
+      "mandatory_dims" : [ ],
+      "joint_dims" : [ ]
     }
-  ],
+  } ],
   "override_kylin_properties": {
     "kylin.cube.algorithm": "inmem"
   },
-  "notify_list": [],
-  "status_need_notify": [],
-  "auto_merge_time_ranges": null,
-  "retention_range": 0,
-  "engine_type": 2,
-  "storage_type": 2,
+  "notify_list" : [ ],
+  "status_need_notify" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 2,
+  "storage_type" : 2,
   "partition_date_start": 0
 }
\ No newline at end of file


[17/50] [abbrv] kylin git commit: KYLIN-2004 Make the creating intermediate hive table steps configurable

Posted by sh...@apache.org.
KYLIN-2004 Make the creating intermediate hive table steps configurable

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/233a699f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/233a699f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/233a699f

Branch: refs/heads/KYLIN-1726
Commit: 233a699f3b6f7a6c64ecf43fb80108b56db61f5f
Parents: d7cbf67
Author: shaofengshi <sh...@apache.org>
Authored: Fri Sep 9 19:04:10 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Sep 10 17:59:46 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |   4 +-
 .../org/apache/kylin/job/JoinedFlatTable.java   |  48 ++++--
 .../kylin/job/constant/ExecutableConstants.java |   1 +
 .../kylin/job/execution/AbstractExecutable.java |   2 +-
 .../apache/kylin/job/JoinedFlatTableTest.java   |   2 +-
 .../kylin/metadata/model/DataModelDesc.java     |   8 +-
 ...t_kylin_cube_without_slr_left_join_desc.json |   3 +-
 .../kylin/rest/controller/CubeController.java   |   2 +-
 .../source/hive/CreateFlatHiveTableStep.java    |  32 +++-
 .../apache/kylin/source/hive/HiveMRInput.java   | 169 ++++++++++++++++++-
 10 files changed, 234 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 2ac9d48..de9051c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -805,7 +805,7 @@ abstract public class KylinConfigBase implements Serializable {
         setProperty("kylin.dict.append.cache.size", String.valueOf(cacheSize));
     }
 
-    public boolean getTableJoinTypeCheck() {
-        return Boolean.valueOf(this.getOptional("kylin.table.join.strong.check", "true"));
+    public String getCreateFlatHiveTableMethod() {
+        return getOptional("kylin.hive.create.flat.table.method", "1");
     }
 }
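
The new getter replaces the removed join-type check and selects, per cube, how the intermediate flat table is built; the key is kylin.hive.create.flat.table.method and the default is "1". A minimal sketch of how the two values are interpreted, using only names introduced later in this commit (the kylinConfig variable is assumed to be the cube-level config, as obtained in HiveMRInput.addStepPhase1_CreateFlatTable below):

    // Sketch only -- mirrors the branching added to HiveMRInput further down in this commit.
    String method = kylinConfig.getCreateFlatHiveTableMethod();
    if ("1".equals(method)) {
        // create the flat table first, then count its rows and redistribute it
    } else if ("2".equals(method)) {
        // count rows from the source first, then create the flat table with redistribution
        // enabled; suited to partitioned source tables
    } else {
        throw new IllegalArgumentException("Unknown value for kylin.hive.create.flat.table.method: " + method);
    }

A cube can pin the method through its override_kylin_properties, as the test cube descriptor change in this commit does with "kylin.hive.create.flat.table.method": "2".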

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index b39265d..699d084 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -107,14 +107,14 @@ public class JoinedFlatTable {
         return ddl.toString();
     }
 
-    public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, JobEngineConfig engineConfig) {
+    public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, JobEngineConfig engineConfig, boolean redistribute) {
         StringBuilder sql = new StringBuilder();
         sql.append(generateHiveSetStatements(engineConfig));
-        sql.append("INSERT OVERWRITE TABLE " + intermediateTableDesc.getTableName() + " " + generateSelectDataStatement(intermediateTableDesc) + ";").append("\n");
+        sql.append("INSERT OVERWRITE TABLE " + intermediateTableDesc.getTableName() + " " + generateSelectDataStatement(intermediateTableDesc, redistribute) + ";").append("\n");
         return sql.toString();
     }
 
-    public static String generateSelectDataStatement(IJoinedFlatTableDesc flatDesc) {
+    public static String generateSelectDataStatement(IJoinedFlatTableDesc flatDesc, boolean redistribute) {
         StringBuilder sql = new StringBuilder();
         sql.append("SELECT" + "\n");
         String tableAlias;
@@ -129,7 +129,15 @@ public class JoinedFlatTable {
         }
         appendJoinStatement(flatDesc, sql, tableAliasMap);
         appendWhereStatement(flatDesc, sql, tableAliasMap);
-        appendDistributeStatement(flatDesc, sql, tableAliasMap);
+        if (redistribute == true) {
+            String redistributeCol = null;
+            TblColRef distDcol = flatDesc.getDistributedBy();
+            if (distDcol != null) {
+                String tblAlias = tableAliasMap.get(distDcol.getTable());
+                redistributeCol = tblAlias + "." + distDcol.getName();
+            }
+            appendDistributeStatement(sql, redistributeCol);
+        }
         return sql.toString();
     }
 
@@ -228,14 +236,11 @@ public class JoinedFlatTable {
         return result;
     }
 
-    private static void appendDistributeStatement(IJoinedFlatTableDesc flatDesc, StringBuilder sql, Map<String, String> tableAliasMap) {
-        TblColRef distDcol = flatDesc.getDistributedBy();
-
-        if (distDcol != null) {
-            String tblAlias = tableAliasMap.get(distDcol.getTable());
-            sql.append(" DISTRIBUTE BY ").append(tblAlias).append(".").append(distDcol.getName());
+    private static void appendDistributeStatement(StringBuilder sql, String redistributeCol) {
+        if (redistributeCol != null) {
+            sql.append(" DISTRIBUTE BY ").append(redistributeCol).append(";\n");
         } else {
-            sql.append(" DISTRIBUTE BY RAND()");
+            sql.append(" DISTRIBUTE BY RAND()").append(";\n");
         }
     }
 
@@ -280,4 +285,25 @@ public class JoinedFlatTable {
         return hiveDataType.toLowerCase();
     }
 
+    public static String generateSelectRowCountStatement(IJoinedFlatTableDesc intermediateTableDesc, String outputDir) {
+        StringBuilder sql = new StringBuilder();
+        sql.append("set hive.exec.compress.output=false;\n");
+        sql.append("INSERT OVERWRITE DIRECTORY '" + outputDir + "' SELECT count(*) FROM " + intermediateTableDesc.getTableName() + ";\n");
+        return sql.toString();
+    }
+
+    public static String generateRedistributeFlatTableStatement(IJoinedFlatTableDesc intermediateTableDesc) {
+        final String tableName = intermediateTableDesc.getTableName();
+        StringBuilder sql = new StringBuilder();
+        sql.append("INSERT OVERWRITE TABLE " + tableName + " SELECT * FROM " + tableName);
+
+        String redistributeCol = null;
+        TblColRef distDcol = intermediateTableDesc.getDistributedBy();
+        if (distDcol != null) {
+            redistributeCol = colName(distDcol.getCanonicalName());
+        }
+        appendDistributeStatement(sql, redistributeCol);
+        return sql.toString();
+    }
+
 }
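
For reference, the new generateRedistributeFlatTableStatement helper emits plain HiveQL over the intermediate table. A small usage sketch, assuming flatDesc is an IJoinedFlatTableDesc whose getDistributedBy() returns null and whose table name is the illustrative kylin_intermediate_xyz:

    // Illustrative only -- with no distributed-by column the helper falls back to RAND().
    String hql = JoinedFlatTable.generateRedistributeFlatTableStatement(flatDesc);
    // hql is:
    //   INSERT OVERWRITE TABLE kylin_intermediate_xyz SELECT * FROM kylin_intermediate_xyz DISTRIBUTE BY RAND();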

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
index 6084e7b..893c034 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
@@ -56,5 +56,6 @@ public final class ExecutableConstants {
     public static final String STEP_NAME_BUILD_II = "Build Inverted Index";
     public static final String STEP_NAME_CONVERT_II_TO_HFILE = "Convert Inverted Index Data to HFile";
     public static final String STEP_NAME_UPDATE_II_INFO = "Update Inverted Index Info";
+    public static final String STEP_NAME_REDISTRIBUTE_FLAT_HIVE_TABLE = "Redistribute Flat Hive Table";
     public static final String NOTIFY_EMAIL_TEMPLATE = "<div><b>Build Result of Job ${job_name}</b><pre><ul>" + "<li>Build Result: <b>${result}</b></li>" + "<li>Job Engine: ${job_engine}</li>" + "<li>Env: ${env_name}</li>" + "<li>Project: ${project_name}</li>" + "<li>Cube Name: ${cube_name}</li>" + "<li>Source Records Count: ${source_records_count}</li>" + "<li>Start Time: ${start_time}</li>" + "<li>Duration: ${duration}</li>" + "<li>MR Waiting: ${mr_waiting}</li>" + "<li>Last Update Time: ${last_update_time}</li>" + "<li>Submitter: ${submitter}</li>" + "<li>Error Log: ${error_log}</li>" + "</ul></pre><div/>";
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index 4dedad1..09f9b54 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -49,7 +49,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     protected static final String START_TIME = "startTime";
     protected static final String END_TIME = "endTime";
 
-    private static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class);
+    protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class);
     protected int retry = 0;
 
     private String name;

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-job/src/test/java/org/apache/kylin/job/JoinedFlatTableTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/JoinedFlatTableTest.java b/core-job/src/test/java/org/apache/kylin/job/JoinedFlatTableTest.java
index 0faf22a..1fe47f8 100644
--- a/core-job/src/test/java/org/apache/kylin/job/JoinedFlatTableTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/JoinedFlatTableTest.java
@@ -77,7 +77,7 @@ public class JoinedFlatTableTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testGenerateInsertSql() throws IOException {
-        String sqls = JoinedFlatTable.generateInsertDataStatement(flatTableDesc, new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        String sqls = JoinedFlatTable.generateInsertDataStatement(flatTableDesc, new JobEngineConfig(KylinConfig.getInstanceFromEnv()), true);
         System.out.println(sqls);
 
         int length = sqls.length();

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
index 7f5edfe..d04830b 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/DataModelDesc.java
@@ -314,13 +314,7 @@ public class DataModelDesc extends RootPersistentEntity {
             }
             for (int i = 0; i < fkCols.length; i++) {
                 if (!fkCols[i].getDatatype().equals(pkCols[i].getDatatype())) {
-                    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-                    final String msg = "Primary key " + lookup.getTable() + "." + pkCols[i].getName() + "." + pkCols[i].getDatatype() + " are not consistent with Foreign key " + this.getFactTable() + "." + fkCols[i].getName() + "." + fkCols[i].getDatatype();
-                    if (kylinConfig.getTableJoinTypeCheck() == true) {
-                        throw new IllegalStateException(msg);
-                    } else {
-                        logger.warn(msg);
-                    }
+                    logger.warn("Primary key " + lookup.getTable() + "." + pkCols[i].getName() + "." + pkCols[i].getDatatype() + " are not consistent with Foreign key " + this.getFactTable() + "." + fkCols[i].getName() + "." + fkCols[i].getDatatype());
                 }
             }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
index ca1b35c..0470dc6 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
@@ -289,7 +289,8 @@
   "engine_type" : 2,
   "storage_type" : 2,
   "override_kylin_properties": {
-    "kylin.job.cubing.inmem.sampling.hll.precision": "16"
+    "kylin.job.cubing.inmem.sampling.hll.precision": "16",
+    "kylin.hive.create.flat.table.method": "2"
   },
   "partition_date_start": 0
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 7081d02..5397df7 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -152,7 +152,7 @@ public class CubeController extends BasicController {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
         CubeSegment cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.READY);
         IJoinedFlatTableDesc flatTableDesc = EngineFactory.getJoinedFlatTableDesc(cubeSegment);
-        String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc);
+        String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc, false);
 
         GeneralResponse repsonse = new GeneralResponse();
         repsonse.setProperty("sql", sql);

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/source-hive/src/main/java/org/apache/kylin/source/hive/CreateFlatHiveTableStep.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/CreateFlatHiveTableStep.java b/source-hive/src/main/java/org/apache/kylin/source/hive/CreateFlatHiveTableStep.java
index cd32f9c..bcb9a38 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/CreateFlatHiveTableStep.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/CreateFlatHiveTableStep.java
@@ -76,8 +76,11 @@ public class CreateFlatHiveTableStep extends AbstractExecutable {
     private void createFlatHiveTable(KylinConfig config, int numReducers) throws IOException {
         final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
         hiveCmdBuilder.addStatement(getInitStatement());
-        hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
-        hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n"); //disable merge
+        boolean useRedistribute = getUseRedistribute();
+        if (useRedistribute == true) {
+            hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
+            hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n"); //disable merge
+        }
         hiveCmdBuilder.addStatement(getCreateTableStatement());
         final String cmd = hiveCmdBuilder.toString();
 
@@ -101,13 +104,20 @@ public class CreateFlatHiveTableStep extends AbstractExecutable {
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
         KylinConfig config = getCubeSpecificConfig();
         try {
-            long rowCount = readRowCountFromFile();
-            if (!config.isEmptySegmentAllowed() && rowCount == 0) {
-                stepLogger.log("Detect upstream hive table is empty, " + "fail the job because \"kylin.job.allow.empty.segment\" = \"false\"");
-                return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog());
+
+            boolean useRedistribute = getUseRedistribute();
+
+            int numReducers = 0;
+            if (useRedistribute == true) {
+                long rowCount = readRowCountFromFile();
+                if (!config.isEmptySegmentAllowed() && rowCount == 0) {
+                    stepLogger.log("Detect upstream hive table is empty, " + "fail the job because \"kylin.job.allow.empty.segment\" = \"false\"");
+                    return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog());
+                }
+
+                numReducers = determineNumReducer(config, rowCount);
             }
 
-            int numReducers = determineNumReducer(config, rowCount);
             createFlatHiveTable(config, numReducers);
             return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
 
@@ -125,6 +135,14 @@ public class CreateFlatHiveTableStep extends AbstractExecutable {
         return getParam("HiveInit");
     }
 
+    public void setUseRedistribute(boolean useRedistribute) {
+        setParam("useRedistribute", String.valueOf(useRedistribute));
+    }
+
+    public boolean getUseRedistribute() {
+        return Boolean.valueOf(getParam("useRedistribute"));
+    }
+
     public void setCreateTableStatement(String sql) {
         setParam("HiveRedistributeData", sql);
     }
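
A side note on the new useRedistribute flag: like the other step settings it is persisted as a string parameter, so a step that never had the flag set parses it back as false (Boolean.valueOf(null) is false). A minimal sketch, assuming the step is wired up as in the HiveMRInput change below:

    CreateFlatHiveTableStep step = new CreateFlatHiveTableStep();
    step.setUseRedistribute(true);                     // stored as the string "true"
    boolean redistribute = step.getUseRedistribute();  // Boolean.valueOf("true") -> true; unset -> false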

http://git-wip-us.apache.org/repos/asf/kylin/blob/233a699f/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index e3d7879..3ea9af5 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -19,8 +19,10 @@
 package org.apache.kylin.source.hive;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.Set;
 
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -28,6 +30,11 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.BufferedLogger;
+import org.apache.kylin.common.util.CliCommandExecutor;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.engine.mr.IMRInput;
 import org.apache.kylin.engine.mr.JobBuilderSupport;
@@ -110,16 +117,46 @@ public class HiveMRInput implements IMRInput {
         public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
             final String cubeName = CubingExecutableUtil.getCubeName(jobFlow.getParams());
 
-            final String rowCountOutputDir = JobBuilderSupport.getJobWorkingDir(conf, jobFlow.getId()) + "/row_count";
+            final KylinConfig kylinConfig = CubeManager.getInstance(conf.getConfig()).getCube(cubeName).getConfig();
+
+            String createFlatTableMethod = kylinConfig.getCreateFlatHiveTableMethod();
+            if ("1".equals(createFlatTableMethod)) {
+                // create flat table first, then count and redistribute
+                jobFlow.addTask(createFlatHiveTableStep(conf, flatDesc, jobFlow.getId(), cubeName, false, ""));
+                jobFlow.addTask(createRedistributeFlatHiveTableStep(conf, flatDesc, jobFlow.getId(), cubeName));
+            } else if ("2".equals(createFlatTableMethod)) {
+                // count from source table first, and then redistribute, suitable for partitioned table
+                final String rowCountOutputDir = JobBuilderSupport.getJobWorkingDir(conf, jobFlow.getId()) + "/row_count";
+                jobFlow.addTask(createCountHiveTableStep(conf, flatDesc, jobFlow.getId(), rowCountOutputDir));
+                jobFlow.addTask(createFlatHiveTableStep(conf, flatDesc, jobFlow.getId(), cubeName, true, rowCountOutputDir));
+            } else {
+                throw new IllegalArgumentException("Unknown value for kylin.hive.create.flat.table.method: " + createFlatTableMethod);
+            }
 
-            jobFlow.addTask(createCountHiveTableStep(conf, flatDesc, jobFlow.getId(), rowCountOutputDir));
-            jobFlow.addTask(createFlatHiveTableStep(conf, flatDesc, jobFlow.getId(), cubeName, rowCountOutputDir));
             AbstractExecutable task = createLookupHiveViewMaterializationStep(jobFlow.getId());
             if (task != null) {
                 jobFlow.addTask(task);
             }
         }
 
+        public static AbstractExecutable createRedistributeFlatHiveTableStep(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc, String jobId, String cubeName) {
+            StringBuilder hiveInitBuf = new StringBuilder();
+            hiveInitBuf.append("USE ").append(conf.getConfig().getHiveDatabaseForIntermediateTable()).append(";\n");
+            hiveInitBuf.append(JoinedFlatTable.generateHiveSetStatements(conf));
+
+            String rowCountOutputDir = JobBuilderSupport.getJobWorkingDir(conf, jobId) + "/row_count";
+
+            RedistributeFlatHiveTableStep step = new RedistributeFlatHiveTableStep();
+            step.setInitStatement(hiveInitBuf.toString());
+            step.setSelectRowCountStatement(JoinedFlatTable.generateSelectRowCountStatement(flatTableDesc, rowCountOutputDir));
+            step.setRowCountOutputDir(rowCountOutputDir);
+            step.setRedistributeDataStatement(JoinedFlatTable.generateRedistributeFlatTableStatement(flatTableDesc));
+            CubingExecutableUtil.setCubeName(cubeName, step.getParams());
+            step.setName(ExecutableConstants.STEP_NAME_REDISTRIBUTE_FLAT_HIVE_TABLE);
+            return step;
+        }
+
+
         public static AbstractExecutable createCountHiveTableStep(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc, String jobId, String rowCountOutputDir) {
             final ShellExecutable step = new ShellExecutable();
 
@@ -174,17 +211,17 @@ public class HiveMRInput implements IMRInput {
             return step;
         }
 
-        public static AbstractExecutable createFlatHiveTableStep(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc, String jobId, String cubeName, String rowCountOutputDir) {
+        public static AbstractExecutable createFlatHiveTableStep(JobEngineConfig conf, IJoinedFlatTableDesc flatTableDesc, String jobId, String cubeName, boolean redistribute, String rowCountOutputDir) {
             StringBuilder hiveInitBuf = new StringBuilder();
             hiveInitBuf.append(JoinedFlatTable.generateHiveSetStatements(conf));
 
             final String useDatabaseHql = "USE " + conf.getConfig().getHiveDatabaseForIntermediateTable() + ";\n";
             final String dropTableHql = JoinedFlatTable.generateDropTableStatement(flatTableDesc);
             final String createTableHql = JoinedFlatTable.generateCreateTableStatement(flatTableDesc, JobBuilderSupport.getJobWorkingDir(conf, jobId));
-            String insertDataHqls;
-            insertDataHqls = JoinedFlatTable.generateInsertDataStatement(flatTableDesc, conf);
+            String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(flatTableDesc, conf, redistribute);
 
             CreateFlatHiveTableStep step = new CreateFlatHiveTableStep();
+            step.setUseRedistribute(redistribute);
             step.setInitStatement(hiveInitBuf.toString());
             step.setRowCountOutputDir(rowCountOutputDir);
             step.setCreateTableStatement(useDatabaseHql + dropTableHql + createTableHql + insertDataHqls);
@@ -213,6 +250,126 @@ public class HiveMRInput implements IMRInput {
         }
     }
 
+    public static class RedistributeFlatHiveTableStep extends AbstractExecutable {
+        private final BufferedLogger stepLogger = new BufferedLogger(logger);
+
+        private void computeRowCount(CliCommandExecutor cmdExecutor) throws IOException {
+            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(getInitStatement());
+            hiveCmdBuilder.addStatement("set hive.exec.compress.output=false;\n");
+            hiveCmdBuilder.addStatement(getSelectRowCountStatement());
+            final String cmd = hiveCmdBuilder.build();
+
+            stepLogger.log("Compute row count of flat hive table, cmd: ");
+            stepLogger.log(cmd);
+
+            Pair<Integer, String> response = cmdExecutor.execute(cmd, stepLogger);
+            if (response.getFirst() != 0) {
+                throw new RuntimeException("Failed to compute row count of flat hive table");
+            }
+        }
+
+        private long readRowCountFromFile(Path file) throws IOException {
+            FileSystem fs = FileSystem.get(file.toUri(), HadoopUtil.getCurrentConfiguration());
+            InputStream in = fs.open(file);
+            try {
+                String content = IOUtils.toString(in);
+                return Long.valueOf(content.trim()); // strip the '\n' character
+
+            } finally {
+                IOUtils.closeQuietly(in);
+            }
+        }
+
+        private int determineNumReducer(KylinConfig config) throws IOException {
+            computeRowCount(config.getCliCommandExecutor());
+
+            Path rowCountFile = new Path(getRowCountOutputDir(), "000000_0");
+            long rowCount = readRowCountFromFile(rowCountFile);
+            int mapperInputRows = config.getHadoopJobMapperInputRows();
+
+            int numReducers = Math.round(rowCount / ((float) mapperInputRows));
+            numReducers = Math.max(1, numReducers);
+
+            stepLogger.log("total input rows = " + rowCount);
+            stepLogger.log("expected input rows per mapper = " + mapperInputRows);
+            stepLogger.log("num reducers for RedistributeFlatHiveTableStep = " + numReducers);
+
+            return numReducers;
+        }
+
+        private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
+            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(getInitStatement());
+            hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
+            hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n");
+            hiveCmdBuilder.addStatement(getRedistributeDataStatement());
+            final String cmd = hiveCmdBuilder.toString();
+
+            stepLogger.log("Redistribute table, cmd: ");
+            stepLogger.log(cmd);
+
+            Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
+            if (response.getFirst() != 0) {
+                throw new RuntimeException("Failed to redistribute flat hive table");
+            }
+        }
+
+        private KylinConfig getCubeSpecificConfig() {
+            String cubeName = CubingExecutableUtil.getCubeName(getParams());
+            CubeManager manager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+            CubeInstance cube = manager.getCube(cubeName);
+            return cube.getConfig();
+        }
+
+        @Override
+        protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+            KylinConfig config = getCubeSpecificConfig();
+
+            try {
+                int numReducers = determineNumReducer(config);
+                redistributeTable(config, numReducers);
+                return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
+
+            } catch (Exception e) {
+                logger.error("job:" + getId() + " execute finished with exception", e);
+                return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog());
+            }
+        }
+
+        public void setInitStatement(String sql) {
+            setParam("HiveInit", sql);
+        }
+
+        public String getInitStatement() {
+            return getParam("HiveInit");
+        }
+
+        public void setSelectRowCountStatement(String sql) {
+            setParam("HiveSelectRowCount", sql);
+        }
+
+        public String getSelectRowCountStatement() {
+            return getParam("HiveSelectRowCount");
+        }
+
+        public void setRedistributeDataStatement(String sql) {
+            setParam("HiveRedistributeData", sql);
+        }
+
+        public String getRedistributeDataStatement() {
+            return getParam("HiveRedistributeData");
+        }
+
+        public void setRowCountOutputDir(String rowCountOutputDir) {
+            setParam("rowCountOutputDir", rowCountOutputDir);
+        }
+
+        public String getRowCountOutputDir() {
+            return getParam("rowCountOutputDir");
+        }
+    }
+
     public static class GarbageCollectionStep extends AbstractExecutable {
         private static final Logger logger = LoggerFactory.getLogger(GarbageCollectionStep.class);
 


[18/50] [abbrv] kylin git commit: KYLIN-2004 check whether source data is empty

Posted by sh...@apache.org.
KYLIN-2004 check whether source data is empty

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/56136ede
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/56136ede
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/56136ede

Branch: refs/heads/KYLIN-1726
Commit: 56136ede7c8b9abac5ddd7b7785b3f63c59b74db
Parents: 233a699
Author: shaofengshi <sh...@apache.org>
Authored: Sat Sep 10 17:52:32 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Sep 10 17:59:59 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/source/hive/HiveMRInput.java   | 37 ++++++++++----------
 1 file changed, 19 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/56136ede/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index 3ea9af5..520d7cc 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -281,23 +281,6 @@ public class HiveMRInput implements IMRInput {
             }
         }
 
-        private int determineNumReducer(KylinConfig config) throws IOException {
-            computeRowCount(config.getCliCommandExecutor());
-
-            Path rowCountFile = new Path(getRowCountOutputDir(), "000000_0");
-            long rowCount = readRowCountFromFile(rowCountFile);
-            int mapperInputRows = config.getHadoopJobMapperInputRows();
-
-            int numReducers = Math.round(rowCount / ((float) mapperInputRows));
-            numReducers = Math.max(1, numReducers);
-
-            stepLogger.log("total input rows = " + rowCount);
-            stepLogger.log("expected input rows per mapper = " + mapperInputRows);
-            stepLogger.log("num reducers for RedistributeFlatHiveTableStep = " + numReducers);
-
-            return numReducers;
-        }
-
         private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
             final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
             hiveCmdBuilder.addStatement(getInitStatement());
@@ -327,7 +310,25 @@ public class HiveMRInput implements IMRInput {
             KylinConfig config = getCubeSpecificConfig();
 
             try {
-                int numReducers = determineNumReducer(config);
+
+                computeRowCount(config.getCliCommandExecutor());
+
+                Path rowCountFile = new Path(getRowCountOutputDir(), "000000_0");
+                long rowCount = readRowCountFromFile(rowCountFile);
+                if (!config.isEmptySegmentAllowed() && rowCount == 0) {
+                    stepLogger.log("Detect upstream hive table is empty, " + "fail the job because \"kylin.job.allow.empty.segment\" = \"false\"");
+                    return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog());
+                }
+
+                int mapperInputRows = config.getHadoopJobMapperInputRows();
+
+                int numReducers = Math.round(rowCount / ((float) mapperInputRows));
+                numReducers = Math.max(1, numReducers);
+
+                stepLogger.log("total input rows = " + rowCount);
+                stepLogger.log("expected input rows per mapper = " + mapperInputRows);
+                stepLogger.log("num reducers for RedistributeFlatHiveTableStep = " + numReducers);
+
                 redistributeTable(config, numReducers);
                 return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());
 


[31/50] [abbrv] kylin git commit: KYLIN-1922 fix CI, again

Posted by sh...@apache.org.
KYLIN-1922 fix CI, again


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0362c2bb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0362c2bb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0362c2bb

Branch: refs/heads/KYLIN-1726
Commit: 0362c2bb8ea8c9f0178fbe4eb08113eaf94cd01b
Parents: 3c4537d
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Sep 13 11:58:41 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Sep 13 14:10:30 2016 +0800

----------------------------------------------------------------------
 .../kylin/cube/CubeCapabilityChecker.java       |  9 +++++++-
 .../kylin/measure/topn/TopNMeasureType.java     |  3 +--
 .../src/test/resources/query/sql/query45.sql    | 23 --------------------
 .../resources/query/sql_tableau/query16.sql     |  1 +
 4 files changed, 10 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0362c2bb/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
index e8c96b4..caef529 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeCapabilityChecker.java
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.measure.MeasureType;
 import org.apache.kylin.measure.basic.BasicMeasureType;
@@ -39,6 +40,7 @@ import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluenc
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
 /**
@@ -163,6 +165,7 @@ public class CubeCapabilityChecker {
     // custom measure types can cover unmatched dimensions or measures
     private static void tryCustomMeasureTypes(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, CubeInstance cube, CapabilityResult result) {
         CubeDesc cubeDesc = cube.getDescriptor();
+        List<String> influencingMeasures = Lists.newArrayList();
         for (MeasureDesc measure : cubeDesc.getMeasures()) {
             //            if (unmatchedDimensions.isEmpty() && unmatchedAggregations.isEmpty())
             //                break;
@@ -172,9 +175,13 @@ public class CubeCapabilityChecker {
                 continue;
 
             CapabilityInfluence inf = measureType.influenceCapabilityCheck(unmatchedDimensions, unmatchedAggregations, digest, measure);
-            if (inf != null)
+            if (inf != null) {
                 result.influences.add(inf);
+                influencingMeasures.add(measure.getName() + "@" + measureType.getClass());
+            }
         }
+        if (influencingMeasures.size() != 0)
+            logger.info("Cube {} CapabilityInfluences: {}", cube.getCanonicalName(), StringUtils.join(influencingMeasures, ","));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/0362c2bb/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 01eb90c..800ca88 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -274,11 +274,10 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
 
         if (sum.isSum() == false)
             return false;
-        
+
         if (sum.getParameter() == null || sum.getParameter().getColRefs() == null || sum.getParameter().getColRefs().size() == 0)
             return false;
 
-
         TblColRef sumCol = sum.getParameter().getColRefs().get(0);
         return sumCol.equals(topnNumCol);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/0362c2bb/kylin-it/src/test/resources/query/sql/query45.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query45.sql b/kylin-it/src/test/resources/query/sql/query45.sql
deleted file mode 100644
index d3cfb06..0000000
--- a/kylin-it/src/test/resources/query/sql/query45.sql
+++ /dev/null
@@ -1,23 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-
-
-select seller_id,lstg_format_name, sum(price) as s from test_kylin_fact
-  where lstg_format_name='FP-GTC' 
-  group by seller_id,lstg_format_name

http://git-wip-us.apache.org/repos/asf/kylin/blob/0362c2bb/kylin-it/src/test/resources/query/sql_tableau/query16.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_tableau/query16.sql b/kylin-it/src/test/resources/query/sql_tableau/query16.sql
index aaa4ab2..ad82c09 100644
--- a/kylin-it/src/test/resources/query/sql_tableau/query16.sql
+++ b/kylin-it/src/test/resources/query/sql_tableau/query16.sql
@@ -20,3 +20,4 @@ SELECT EXTRACT(YEAR FROM TEST_CAL_DT.WEEK_BEG_DT) AS yr_WEEK_BEG_DT_ok, QUARTER(
  FROM TEST_KYLIN_FACT 
  inner JOIN EDW.TEST_CAL_DT AS TEST_CAL_DT ON (TEST_KYLIN_FACT.CAL_DT = TEST_CAL_DT.CAL_DT) 
  GROUP BY EXTRACT(YEAR FROM TEST_CAL_DT.WEEK_BEG_DT), QUARTER(TEST_CAL_DT.WEEK_BEG_DT) 
+ 


[33/50] [abbrv] kylin git commit: minor but important, add .gitconfig

Posted by sh...@apache.org.
minor but important, add .gitconfig


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/973ecc74
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/973ecc74
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/973ecc74

Branch: refs/heads/KYLIN-1726
Commit: 973ecc74943613e32e5a9145bff3cad4fe48e01f
Parents: 5dc5ac8
Author: Li Yang <li...@apache.org>
Authored: Tue Sep 13 17:22:24 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Sep 13 17:22:24 2016 +0800

----------------------------------------------------------------------
 .gitconfig | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/973ecc74/.gitconfig
----------------------------------------------------------------------
diff --git a/.gitconfig b/.gitconfig
new file mode 100644
index 0000000..88bd52b
--- /dev/null
+++ b/.gitconfig
@@ -0,0 +1,3 @@
+[core]
+	ignorecase = false
+	autocrlf = false


[42/50] [abbrv] kylin git commit: KYLIN-1726 update to kafka 0.10

Posted by sh...@apache.org.
KYLIN-1726 update to kafka 0.10


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1b1b2e37
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1b1b2e37
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1b1b2e37

Branch: refs/heads/KYLIN-1726
Commit: 1b1b2e37fdcba7ad67f0fa3f2369aa65431f13bc
Parents: 4e060e7
Author: shaofengshi <sh...@apache.org>
Authored: Fri Sep 2 19:25:57 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java | 30 ++++++++++++----
 .../kylin/rest/controller/CubeController.java   |  8 ++---
 .../apache/kylin/rest/service/JobService.java   |  4 +--
 .../source/kafka/util/KafkaSampleProducer.java  | 38 ++++++++++++--------
 4 files changed, 53 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1b1b2e37/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index fc68798..11eabce 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -679,12 +679,28 @@ public class CubeManager implements IRealizationProvider {
             return null;
         }
 
-        if (cube.getBuildingSegments().size() > 0) {
-            logger.debug("Cube " + cube.getName() + " has bulding segment, will not trigger merge at this moment");
-            return null;
+        List<CubeSegment> buildingSegs = cube.getBuildingSegments();
+        if (buildingSegs.size() > 0) {
+            logger.debug("Cube " + cube.getName() + " has " + buildingSegs.size() + " building segments");
+        }
+
+        List<CubeSegment> readySegs = cube.getSegments(SegmentStatusEnum.READY);
+
+        List<CubeSegment> mergingSegs = Lists.newArrayList();
+        if (buildingSegs.size() > 0) {
+            
+            for (CubeSegment building : buildingSegs) {
+                // exclude those under-merging segs
+                for (CubeSegment ready : readySegs) {
+                    if (ready.getSourceOffsetStart() >= building.getSourceOffsetStart() && ready.getSourceOffsetEnd() <= building.getSourceOffsetEnd()) {
+                        mergingSegs.add(ready);
+                    }
+                }
+            }
         }
 
-        List<CubeSegment> ready = cube.getSegments(SegmentStatusEnum.READY);
+        // exclude those already under merging segments
+        readySegs.removeAll(mergingSegs);
 
         long[] timeRanges = cube.getDescriptor().getAutoMergeTimeRanges();
         Arrays.sort(timeRanges);
@@ -692,9 +708,9 @@ public class CubeManager implements IRealizationProvider {
         for (int i = timeRanges.length - 1; i >= 0; i--) {
             long toMergeRange = timeRanges[i];
 
-            for (int s = 0; s < ready.size(); s++) {
-                CubeSegment seg = ready.get(s);
-                Pair<CubeSegment, CubeSegment> p = findMergeOffsetsByDateRange(ready.subList(s, ready.size()), //
+            for (int s = 0; s < readySegs.size(); s++) {
+                CubeSegment seg = readySegs.get(s);
+                Pair<CubeSegment, CubeSegment> p = findMergeOffsetsByDateRange(readySegs.subList(s, readySegs.size()), //
                         seg.getDateRangeStart(), seg.getDateRangeStart() + toMergeRange, toMergeRange);
                 if (p != null && p.getSecond().getDateRangeEnd() - p.getFirst().getDateRangeStart() >= toMergeRange)
                     return Pair.newPair(p.getFirst().getSourceOffsetStart(), p.getSecond().getSourceOffsetEnd());
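
The effect of the exclusion added above: READY segments whose source-offset range falls entirely inside a segment that is currently being built (i.e. the ones an in-flight merge is consuming) are removed from the merge candidates, so they are not scheduled for a second merge. An illustration with made-up offsets:

    // building segment : [0, 3000)            -- an in-flight merge of the first three READY segments
    // READY segments   : [0,1000) [1000,2000) [2000,3000) [3000,4000)
    // excluded         : [0,1000) [1000,2000) [2000,3000)   (each lies within [0, 3000))
    // remaining candidate for the next auto-merge decision: [3000,4000)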

http://git-wip-us.apache.org/repos/asf/kylin/blob/1b1b2e37/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 42b117c..669f53e 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -272,7 +272,7 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT })
     @ResponseBody
     public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
-        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment());
+        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, req.getBuildType(), true, req.isForce() || req.isForceMergeEmptySegment());
     }
 
     /** Build/Rebuild a cube segment by source offset */
@@ -286,16 +286,16 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild2", method = { RequestMethod.PUT })
     @ResponseBody
     public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
-        return buildInternal(cubeName, 0, 0, req.getStartSourceOffset(), req.getEndSourceOffset(), req.getBuildType(), req.isForce());
+        return buildInternal(cubeName, 0, 0, req.getStartSourceOffset(), req.getEndSourceOffset(), req.getBuildType(), false, req.isForce());
     }
 
     private JobInstance buildInternal(String cubeName, long startTime, long endTime, //
-            long startOffset, long endOffset, String buildType, boolean force) {
+            long startOffset, long endOffset, String buildType, boolean strictCheck, boolean force) {
         try {
             String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
             CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
             return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
-                    CubeBuildTypeEnum.valueOf(buildType), force, submitter);
+                    CubeBuildTypeEnum.valueOf(buildType), strictCheck, force, submitter);
         } catch (Exception e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/kylin/blob/1b1b2e37/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index 5c704ba..8929bf1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -199,7 +199,7 @@ public class JobService extends BasicService {
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, //
-            CubeBuildTypeEnum buildType, boolean force, String submitter) throws IOException, JobException {
+            CubeBuildTypeEnum buildType, boolean strictCheck, boolean force, String submitter) throws IOException, JobException {
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
             throw new BadRequestException("Broken cube " + cube.getName() + " can't be built");
@@ -211,7 +211,7 @@ public class JobService extends BasicService {
         DefaultChainedExecutable job;
 
         if (buildType == CubeBuildTypeEnum.BUILD) {
-            CubeSegment newSeg = getCubeManager().appendSegment(cube, startDate, endDate, startOffset, endOffset);
+            CubeSegment newSeg = getCubeManager().appendSegment(cube, startDate, endDate, startOffset, endOffset, strictCheck);
             job = EngineFactory.createBatchCubingJob(newSeg, submitter);
         } else if (buildType == CubeBuildTypeEnum.MERGE) {
             CubeSegment newSeg = getCubeManager().mergeSegments(cube, startDate, endDate, startOffset, endOffset, force);
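
To make the new parameter easier to follow, here is a minimal illustrative sketch of how the strictCheck flag is routed; the class and method names are hypothetical stand-ins for CubeController/JobService, and only the flag values mirror the change above.

// Illustrative sketch only: how the new strictCheck flag is routed by the two
// rebuild endpoints. Class and method names here are hypothetical stand-ins
// for CubeController/JobService; only the flag values mirror the diff above.
public class StrictCheckRoutingSketch {

    /** Time-based rebuild (/{cubeName}/rebuild): segment ranges are checked strictly. */
    void rebuildByTime(String cubeName, long startTime, long endTime, boolean force) {
        submit(cubeName, startTime, endTime, 0, 0, true /* strictCheck */, force);
    }

    /** Offset-based rebuild (/{cubeName}/rebuild2): strict range checking is skipped. */
    void rebuildByOffset(String cubeName, long startOffset, long endOffset, boolean force) {
        submit(cubeName, 0, 0, startOffset, endOffset, false /* strictCheck */, force);
    }

    void submit(String cubeName, long startTime, long endTime, long startOffset, long endOffset,
            boolean strictCheck, boolean force) {
        // In the real code path this reaches JobService.submitJob and then
        // CubeManager.appendSegment(cube, startTime, endTime, startOffset, endOffset, strictCheck).
        System.out.println(cubeName + ": strictCheck=" + strictCheck + ", force=" + force);
    }
}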

http://git-wip-us.apache.org/repos/asf/kylin/blob/1b1b2e37/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
index 2a86a98..3d26d3d 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
@@ -30,16 +30,15 @@ import java.util.Random;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
-
 /**
  * A sample producer which will create sample data to kafka topic
  */
@@ -49,7 +48,8 @@ public class KafkaSampleProducer {
     @SuppressWarnings("static-access")
     private static final Option OPTION_TOPIC = OptionBuilder.withArgName("topic").hasArg().isRequired(true).withDescription("Kafka topic").create("topic");
     private static final Option OPTION_BROKER = OptionBuilder.withArgName("broker").hasArg().isRequired(true).withDescription("Kafka broker").create("broker");
-    private static final Option OPTION_DELAY = OptionBuilder.withArgName("delay").hasArg().isRequired(false).withDescription("Simulated message delay").create("delay");
+    private static final Option OPTION_DELAY = OptionBuilder.withArgName("delay").hasArg().isRequired(false).withDescription("Simulated message delay in milliseconds, default 0").create("delay");
+    private static final Option OPTION_INTERVAL = OptionBuilder.withArgName("interval").hasArg().isRequired(false).withDescription("Simulated message interval in milliseconds, default 1000").create("interval");
 
     private static final ObjectMapper mapper = new ObjectMapper();
 
@@ -61,6 +61,7 @@ public class KafkaSampleProducer {
         options.addOption(OPTION_TOPIC);
         options.addOption(OPTION_BROKER);
         options.addOption(OPTION_DELAY);
+        options.addOption(OPTION_INTERVAL);
         optionsHelper.parseOptions(options, args);
 
         logger.info("options: '" + optionsHelper.getOptionsAsString() + "'");
@@ -70,7 +71,13 @@ public class KafkaSampleProducer {
         long delay = 0;
         String delayString = optionsHelper.getOptionValue(OPTION_DELAY);
         if (delayString != null) {
-            delay = Long.parseLong(optionsHelper.getOptionValue(OPTION_DELAY));
+            delay = Long.parseLong(delayString);
+        }
+
+        long interval = 1000;
+        String intervalString = optionsHelper.getOptionValue(OPTION_INTERVAL);
+        if (intervalString != null) {
+            interval = Long.parseLong(intervalString);
         }
 
         List<String> countries = new ArrayList();
@@ -95,13 +102,16 @@ public class KafkaSampleProducer {
         devices.add("Other");
 
         Properties props = new Properties();
-        props.put("metadata.broker.list", broker);
-        props.put("serializer.class", "kafka.serializer.StringEncoder");
-        props.put("request.required.acks", "1");
-
-        ProducerConfig config = new ProducerConfig(props);
+        props.put("bootstrap.servers", broker);
+        props.put("acks", "all");
+        props.put("retries", 0);
+        props.put("batch.size", 16384);
+        props.put("linger.ms", 1);
+        props.put("buffer.memory", 33554432);
+        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
 
-        Producer<String, String> producer = new Producer<String, String>(config);
+        Producer<String, String> producer = new KafkaProducer<>(props);
 
         boolean alive = true;
         Random rnd = new Random();
@@ -114,10 +124,10 @@ public class KafkaSampleProducer {
             record.put("qty", rnd.nextInt(10));
             record.put("currency", "USD");
             record.put("amount", rnd.nextDouble() * 100);
-            KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
+            ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
             System.out.println("Sending 1 message");
             producer.send(data);
-            Thread.sleep(2000);
+            Thread.sleep(interval);
         }
         producer.close();
     }
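
For context on the client migration above (from the old kafka.javaapi.producer API to the new org.apache.kafka.clients.producer API), here is a minimal standalone sketch of the new-client usage; the broker address and topic name are placeholders, not values from the Kylin code base.

// Illustrative sketch only: the minimal new-client setup that the rewritten
// KafkaSampleProducer relies on. Broker address and topic name are placeholders.
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class MinimalKafkaProducerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker
        props.put("acks", "all");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<>(props);
        try {
            // Key is the event timestamp, value is a JSON payload, as in the sample producer.
            producer.send(new ProducerRecord<>("sample_topic",
                    String.valueOf(System.currentTimeMillis()), "{\"qty\":1,\"currency\":\"USD\"}"));
        } finally {
            producer.close();
        }
    }
}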


[38/50] [abbrv] kylin git commit: minor, drop files with wrong name cases

Posted by sh...@apache.org.
minor, drop files with wrong name cases


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c8326d72
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c8326d72
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c8326d72

Branch: refs/heads/KYLIN-1726
Commit: c8326d729b0fc3fe9e3b72e5c89b9a69e607be93
Parents: 973ecc7
Author: Li Yang <li...@apache.org>
Authored: Tue Sep 13 17:42:33 2016 +0000
Committer: Li Yang <li...@apache.org>
Committed: Tue Sep 13 17:42:33 2016 +0000

----------------------------------------------------------------------
 .../kafka/default.streaming_table.json          | 21 --------------------
 .../streaming/default.streaming_table.json      |  6 ------
 2 files changed, 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c8326d72/examples/test_case_data/localmeta/kafka/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/default.streaming_table.json b/examples/test_case_data/localmeta/kafka/default.streaming_table.json
deleted file mode 100644
index 6a64cce..0000000
--- a/examples/test_case_data/localmeta/kafka/default.streaming_table.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- 
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "DEFAULT.STREAMING_TABLE",
-  "topic": "test_streaming_table_topic_xyz",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/c8326d72/examples/test_case_data/localmeta/streaming/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/default.streaming_table.json b/examples/test_case_data/localmeta/streaming/default.streaming_table.json
deleted file mode 100644
index 85a477b..0000000
--- a/examples/test_case_data/localmeta/streaming/default.streaming_table.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "DEFAULT.STREAMING_TABLE",
-  "type": "kafka",
-  "last_modified": 0
-}


[23/50] [abbrv] kylin git commit: KYLIN-1983 add license header

Posted by sh...@apache.org.
KYLIN-1983 add license header

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b941f115
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b941f115
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b941f115

Branch: refs/heads/KYLIN-1726
Commit: b941f115d5dae06397605c73cb630fbc31c9a5ab
Parents: 5e95abd
Author: shaofengshi <sh...@apache.org>
Authored: Sun Sep 11 09:29:45 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sun Sep 11 10:13:28 2016 +0800

----------------------------------------------------------------------
 .../MultipleDictionaryValueEnumeratorTest.java    | 18 ++++++++++++++++++
 .../localmeta/kylin_account.properties            | 17 +++++++++++++++++
 2 files changed, 35 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b941f115/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
index 6e0f88a..ed8a3c2 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/MultipleDictionaryValueEnumeratorTest.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.dict;
 
 import org.apache.kylin.common.util.Bytes;

http://git-wip-us.apache.org/repos/asf/kylin/blob/b941f115/examples/test_case_data/localmeta/kylin_account.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kylin_account.properties b/examples/test_case_data/localmeta/kylin_account.properties
index 67bbb16..ac34172 100644
--- a/examples/test_case_data/localmeta/kylin_account.properties
+++ b/examples/test_case_data/localmeta/kylin_account.properties
@@ -1,3 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 ### JOB ###
 
 # Only necessary when kylin.job.run.as.remote.cmd=true


[29/50] [abbrv] kylin git commit: KYLIN-1922 fix CI

Posted by sh...@apache.org.
KYLIN-1922 fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6ed643bf
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6ed643bf
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6ed643bf

Branch: refs/heads/KYLIN-1726
Commit: 6ed643bfaa0f571aa2ab538c0d011c293174c987
Parents: 4e8ed97
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Sep 13 09:41:07 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Sep 13 09:41:07 2016 +0800

----------------------------------------------------------------------
 kylin-it/src/test/resources/query/sql/query45.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6ed643bf/kylin-it/src/test/resources/query/sql/query45.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query45.sql b/kylin-it/src/test/resources/query/sql/query45.sql
index 0c78657..7b55443 100644
--- a/kylin-it/src/test/resources/query/sql/query45.sql
+++ b/kylin-it/src/test/resources/query/sql/query45.sql
@@ -18,6 +18,6 @@
 
 
 
-select seller_id, sum(price) from test_kylin_fact
+select seller_id, sum(price) as s from test_kylin_fact
   where lstg_format_name='FP-GTC' 
   group by seller_id limit 20


[37/50] [abbrv] kylin git commit: minor: enable mail notification for build fail

Posted by sh...@apache.org.
minor: enable mail notification for build fail

Signed-off-by: lidongsjtu <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/617734be
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/617734be
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/617734be

Branch: refs/heads/KYLIN-1726
Commit: 617734be4d9259b511c84abfa23870c4a235c816
Parents: 823d98a
Author: Yiming Liu <li...@gmail.com>
Authored: Tue Sep 13 23:39:26 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Sep 13 23:45:24 2016 +0800

----------------------------------------------------------------------
 .travis.yml | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/617734be/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 16ba4c1..5ca9d74 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,4 @@
 # Configuration file for Travis continuous integration.
-# See https://travis-ci.org/apache/calcite
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -20,13 +19,13 @@ language: java
 
 jdk:
   - oraclejdk7
+  - oraclejdk8
 
-#notification:
-#  email:
-#    recipients:
-#      - XXX@apache.org
-#    on_success: always
-#    on_failure: always
+notification:
+  email:
+    recipients:
+      - dev@kylin.apache.org
+    on_failure: always
 
 #branches:
 #  only:


[05/50] [abbrv] kylin git commit: minor, add diagnosis to smoke test

Posted by sh...@apache.org.
minor, add diagnosis to smoke test


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bf261148
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bf261148
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bf261148

Branch: refs/heads/KYLIN-1726
Commit: bf26114866d9b4f8dcb5e13c48acdbf6c8e308e9
Parents: be7751b
Author: lidongsjtu <li...@apache.org>
Authored: Fri Sep 9 12:37:05 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Fri Sep 9 12:37:05 2016 +0800

----------------------------------------------------------------------
 build/smoke-test/smoke-test.sh |  1 +
 build/smoke-test/testDiag.py   | 44 +++++++++++++++++++++++++++++++++++++
 2 files changed, 45 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/bf261148/build/smoke-test/smoke-test.sh
----------------------------------------------------------------------
diff --git a/build/smoke-test/smoke-test.sh b/build/smoke-test/smoke-test.sh
index 06d930a..c21bd6d 100755
--- a/build/smoke-test/smoke-test.sh
+++ b/build/smoke-test/smoke-test.sh
@@ -67,6 +67,7 @@ sleep 3m
 cd $dir/smoke-test
 python testBuildCube.py     || { exit 1; }
 python testQuery.py         || { exit 1; }
+python testDiag.py         || { exit 1; }
 cd -
 
 # Tear down stage

http://git-wip-us.apache.org/repos/asf/kylin/blob/bf261148/build/smoke-test/testDiag.py
----------------------------------------------------------------------
diff --git a/build/smoke-test/testDiag.py b/build/smoke-test/testDiag.py
new file mode 100644
index 0000000..cc932da
--- /dev/null
+++ b/build/smoke-test/testDiag.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This is a Python unittest used in smoke-test.sh, aimed at testing the diagnosis API via REST.
+
+import unittest
+import requests
+
+class testDiag(unittest.TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def testDiag(self):
+        url = "http://sandbox:7070/kylin/api/diag/project/learn_kylin/download"
+        headers = {
+            'content-type': "application/json",
+            'authorization': "Basic QURNSU46S1lMSU4=",
+            'cache-control': "no-cache"
+        }
+
+        response = requests.get(url, headers = headers)
+        self.assertEqual(response.status_code, 200, 'Diagnosis failed.')
+
+
+if __name__ == '__main__':
+    print 'Test Diagnosis for Kylin sample.'
+    unittest.main()
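
The REST call exercised by this test can also be reproduced outside Python; below is a minimal Java sketch of the same check, assuming the sandbox host, port and default ADMIN:KYLIN credentials shown in the test above.

// Illustrative sketch only: the same diagnosis check the Python test performs,
// as a tiny standalone Java client. Host, port and the Basic auth token
// (ADMIN:KYLIN) are the sandbox defaults used by the test above.
import java.net.HttpURLConnection;
import java.net.URL;

public class DiagSmokeCheck {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://sandbox:7070/kylin/api/diag/project/learn_kylin/download");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setRequestProperty("Authorization", "Basic QURNSU46S1lMSU4=");
        int status = conn.getResponseCode();
        if (status != 200) {
            throw new IllegalStateException("Diagnosis failed, HTTP " + status);
        }
        System.out.println("Diagnosis download OK");
    }
}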


[39/50] [abbrv] kylin git commit: KYLIN-2007 CUBOID_CACHE is not cleared when rebuilding ALL cache

Posted by sh...@apache.org.
KYLIN-2007 CUBOID_CACHE is not cleared when rebuilding ALL cache


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c48baba9
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c48baba9
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c48baba9

Branch: refs/heads/KYLIN-1726
Commit: c48baba9fe7e9a5fe374aa83c38987b1931f5766
Parents: 617734b
Author: gaodayue <ga...@meituan.com>
Authored: Wed Sep 14 10:55:22 2016 +0800
Committer: gaodayue <ga...@meituan.com>
Committed: Wed Sep 14 10:55:22 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/rest/service/CacheService.java  | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c48baba9/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
index 5e2d544..2160e3d 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
@@ -37,6 +37,7 @@ import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.cube.CubeDescManager;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.dict.DictionaryManager;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.metadata.MetadataManager;
@@ -204,6 +205,7 @@ public class CacheService extends BasicService {
                 CubeManager.clearCache();
                 HybridManager.clearCache();
                 RealizationRegistry.clearCache();
+                Cuboid.clearCache();
                 ProjectManager.clearCache();
                 KafkaConfigManager.clearCache();
                 StreamingManager.clearCache();


[04/50] [abbrv] kylin git commit: KYLIN-1997 add pivot feature back in query result page

Posted by sh...@apache.org.
KYLIN-1997 add pivot feature back in query result page

Signed-off-by: Jason <ji...@163.com>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/be7751bc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/be7751bc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/be7751bc

Branch: refs/heads/KYLIN-1726
Commit: be7751bc021063f89e5955e5f1c50d66bedc5b91
Parents: ded3b58
Author: chenzhx <34...@qq.com>
Authored: Thu Sep 8 16:21:32 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Thu Sep 8 17:51:28 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/app.js                        | 2 +-
 webapp/app/partials/query/query_detail.html | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/be7751bc/webapp/app/js/app.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/app.js b/webapp/app/js/app.js
index 3708037..629617e 100644
--- a/webapp/app/js/app.js
+++ b/webapp/app/js/app.js
@@ -17,4 +17,4 @@
  */
 
 //Kylin Application Module
-KylinApp = angular.module('kylin', ['ngRoute', 'ngResource', 'ngGrid', 'ui.grid', 'ui.grid.resizeColumns', 'ui.bootstrap', 'ui.ace', 'base64', 'angularLocalStorage', 'localytics.directives', 'treeControl', 'nvd3ChartDirectives', 'ngLoadingRequest', 'oitozero.ngSweetAlert', 'ngCookies', 'angular-underscore', 'ngAnimate', 'ui.sortable', 'angularBootstrapNavTree', 'toggle-switch', 'ngSanitize', 'ui.select', 'ui.bootstrap.datetimepicker']);
+KylinApp = angular.module('kylin', ['ngRoute', 'ngResource', 'ngGrid', 'ui.grid', 'ui.grid.resizeColumns', 'ui.grid.grouping', 'ui.bootstrap', 'ui.ace', 'base64', 'angularLocalStorage', 'localytics.directives', 'treeControl', 'nvd3ChartDirectives', 'ngLoadingRequest', 'oitozero.ngSweetAlert', 'ngCookies', 'angular-underscore', 'ngAnimate', 'ui.sortable', 'angularBootstrapNavTree', 'toggle-switch', 'ngSanitize', 'ui.select', 'ui.bootstrap.datetimepicker']);

http://git-wip-us.apache.org/repos/asf/kylin/blob/be7751bc/webapp/app/partials/query/query_detail.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/query/query_detail.html b/webapp/app/partials/query/query_detail.html
index 72da6b7..8e1286c 100644
--- a/webapp/app/partials/query/query_detail.html
+++ b/webapp/app/partials/query/query_detail.html
@@ -96,7 +96,7 @@
     <div ng-if="!curQuery.graph.show">
         <div class="query-results">
             <div ng-if="curQuery.status=='success'">
-                <div class="gridStyle" ui-grid="curQuery.result.gridOptions" ui-grid-resize-columns
+                <div class="gridStyle" ui-grid="curQuery.result.gridOptions" ui-grid-resize-columns  ui-grid-grouping
                      ng-if="curQuery.result.results.length > 0" id="data_grid"
                      style="{{ui.fullScreen?'height: 600px;width:auto':'height: 300px'}}"></div>
                 <div ng-if="!curQuery.result.results || curQuery.result.results.length == 0"


[47/50] [abbrv] kylin git commit: KYLIN-1762 discard job when no stream message

Posted by sh...@apache.org.
KYLIN-1762 discard job when no stream message


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1108d9ee
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1108d9ee
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1108d9ee

Branch: refs/heads/KYLIN-1726
Commit: 1108d9eeccecbccffea0b3f9049151672196c91a
Parents: 1b1b2e3
Author: shaofengshi <sh...@apache.org>
Authored: Sat Sep 3 18:03:05 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../job/execution/DefaultChainedExecutable.java |  6 +++
 .../kylin/source/kafka/SeekOffsetStep.java      | 45 +++++++++++++++-----
 2 files changed, 41 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1108d9ee/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 753b389..39a5f4f 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -88,6 +88,7 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
             boolean allSucceed = true;
             boolean hasError = false;
             boolean hasRunning = false;
+            boolean hasDiscarded = false;
             for (Executable task : jobs) {
                 final ExecutableState status = task.getStatus();
                 if (status == ExecutableState.ERROR) {
@@ -99,6 +100,9 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
                 if (status == ExecutableState.RUNNING) {
                     hasRunning = true;
                 }
+                if (status == ExecutableState.DISCARDED) {
+                    hasDiscarded = true;
+                }
             }
             if (allSucceed) {
                 setEndTime(System.currentTimeMillis());
@@ -110,6 +114,8 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
                 notifyUserStatusChange(executableContext, ExecutableState.ERROR);
             } else if (hasRunning) {
                 jobService.updateJobOutput(getId(), ExecutableState.RUNNING, null, null);
+            } else if (hasDiscarded) {
+                jobService.updateJobOutput(getId(), ExecutableState.DISCARDED, null, null);
             } else {
                 jobService.updateJobOutput(getId(), ExecutableState.READY, null, null);
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/1108d9ee/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
index 5dca93f..479f1b8 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
@@ -17,6 +17,10 @@
 */
 package org.apache.kylin.source.kafka;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
+import org.apache.commons.math3.util.MathUtils;
 import org.apache.kylin.source.kafka.util.KafkaClient;
 import org.apache.kylin.source.kafka.util.KafkaOffsetMapping;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
@@ -34,6 +38,7 @@ import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -101,19 +106,39 @@ public class SeekOffsetStep extends AbstractExecutable {
             }
         }
 
-        KafkaOffsetMapping.saveOffsetStart(segment, startOffsets);
-        KafkaOffsetMapping.saveOffsetEnd(segment, endOffsets);
+        long totalStartOffset = 0, totalEndOffset = 0;
+        for (Long v : startOffsets.values()) {
+            totalStartOffset += v;
+        }
+        for (Long v : endOffsets.values()) {
+            totalEndOffset += v;
+        }
 
-        segment.setName(CubeSegment.makeSegmentName(0, 0, segment.getSourceOffsetStart(), segment.getSourceOffsetEnd()));
-        CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToUpdateSegs(segment);
-        try {
-            cubeManager.updateCube(cubeBuilder);
+        if (totalEndOffset > totalStartOffset) {
+            KafkaOffsetMapping.saveOffsetStart(segment, startOffsets);
+            KafkaOffsetMapping.saveOffsetEnd(segment, endOffsets);
+            segment.setName(CubeSegment.makeSegmentName(0, 0, totalStartOffset, totalEndOffset));
+            CubeUpdate cubeBuilder = new CubeUpdate(cube);
+            cubeBuilder.setToUpdateSegs(segment);
+            try {
+                cubeManager.updateCube(cubeBuilder);
+            } catch (IOException e) {
+                return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+            }
             return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
-        } catch (IOException e) {
-            logger.error("fail to update cube segment offset", e);
-            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+        } else {
+            CubeUpdate cubeBuilder = new CubeUpdate(cube);
+            cubeBuilder.setToRemoveSegs(segment);
+            try {
+                cubeManager.updateCube(cubeBuilder);
+            } catch (IOException e) {
+                return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+            }
+
+            return new ExecuteResult(ExecuteResult.State.DISCARDED, "No new messages have arrived");
         }
+
+
     }
 
 }
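
The heart of this change is the offset comparison in SeekOffsetStep; the short sketch below restates it in isolation, with hypothetical class and method names.

// Illustrative sketch only: the core decision added to SeekOffsetStep: sum the
// per-partition start and end offsets and discard the segment when nothing new
// has arrived. Class and method names here are hypothetical.
import java.util.Map;

public class OffsetProgressSketch {

    /** Returns true when the topic has produced new messages for this segment. */
    static boolean hasNewMessages(Map<Integer, Long> startOffsets, Map<Integer, Long> endOffsets) {
        long totalStart = 0, totalEnd = 0;
        for (Long v : startOffsets.values()) {
            totalStart += v;
        }
        for (Long v : endOffsets.values()) {
            totalEnd += v;
        }
        // In the real step: true means the segment is built; false means the segment
        // is removed from the cube and the job ends in the DISCARDED state.
        return totalEnd > totalStart;
    }
}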


[11/50] [abbrv] kylin git commit: minor, remove unnecessary raw measures

Posted by sh...@apache.org.
minor, remove unnecessary raw measures


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/618cf28c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/618cf28c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/618cf28c

Branch: refs/heads/KYLIN-1726
Commit: 618cf28c96d6b267d13ff737d2b3c550fc67e176
Parents: a2c875d
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 18:57:01 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 18:57:01 2016 +0800

----------------------------------------------------------------------
 .../kylin/measure/topn/TopNMeasureType.java     |   2 +
 .../test_case_data/localmeta/cube_desc/ssb.json | 409 +++++++------
 .../test_kylin_cube_with_slr_desc.json          | 389 +++++++-----
 ...st_kylin_cube_with_view_inner_join_desc.json | 388 +++++++-----
 ...est_kylin_cube_with_view_left_join_desc.json | 388 +++++++-----
 .../test_kylin_cube_without_slr_desc.json       |  58 +-
 ...t_kylin_cube_without_slr_left_join_desc.json | 587 +++++++++++--------
 .../test_streaming_table_cube_desc.json         | 245 ++++----
 8 files changed, 1423 insertions(+), 1043 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index ed22d61..0756056 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -274,6 +274,8 @@ public class TopNMeasureType extends MeasureType<TopNCounter<ByteArray>> {
 
         if (sum.isSum() == false)
             return false;
+        if (sum.getParameter() == null || sum.getParameter().getColRefs() == null || sum.getParameter().getColRefs().size() == 0)
+            return false;
 
         TblColRef sumCol = sum.getParameter().getColRefs().get(0);
         return sumCol.equals(topnNumCol);

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/ssb.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/ssb.json b/examples/test_case_data/localmeta/cube_desc/ssb.json
index d3ea10b..4903979 100644
--- a/examples/test_case_data/localmeta/cube_desc/ssb.json
+++ b/examples/test_case_data/localmeta/cube_desc/ssb.json
@@ -1,179 +1,256 @@
 {
-  "uuid" : "5c44df30-daec-486e-af90-927bf7851057",
-  "name" : "ssb",
-  "description" : "",
-  "dimensions" : [ {
-    "name" : "SSB.PART_DERIVED",
-    "table" : "SSB.PART",
-    "column" : null,
-    "derived" : [ "P_MFGR", "P_CATEGORY", "P_BRAND" ]
-  }, {
-    "name" : "C_CITY",
-    "table" : "SSB.CUSTOMER",
-    "column" : "C_CITY",
-    "derived" : null
-  }, {
-    "name" : "C_REGION",
-    "table" : "SSB.CUSTOMER",
-    "column" : "C_REGION",
-    "derived" : null
-  }, {
-    "name" : "C_NATION",
-    "table" : "SSB.CUSTOMER",
-    "column" : "C_NATION",
-    "derived" : null
-  }, {
-    "name" : "S_CITY",
-    "table" : "SSB.SUPPLIER",
-    "column" : "S_CITY",
-    "derived" : null
-  }, {
-    "name" : "S_REGION",
-    "table" : "SSB.SUPPLIER",
-    "column" : "S_REGION",
-    "derived" : null
-  }, {
-    "name" : "S_NATION",
-    "table" : "SSB.SUPPLIER",
-    "column" : "S_NATION",
-    "derived" : null
-  }, {
-    "name" : "D_YEAR",
-    "table" : "SSB.DATES",
-    "column" : "D_YEAR",
-    "derived" : null
-  }, {
-    "name" : "D_YEARMONTH",
-    "table" : "SSB.DATES",
-    "column" : "D_YEARMONTH",
-    "derived" : null
-  }, {
-    "name" : "D_YEARMONTHNUM",
-    "table" : "SSB.DATES",
-    "column" : "D_YEARMONTHNUM",
-    "derived" : null
-  }, {
-    "name" : "D_WEEKNUMINYEAR",
-    "table" : "SSB.DATES",
-    "column" : "D_WEEKNUMINYEAR",
-    "derived" : null
-  } ],
-  "measures" : [ {
-    "name" : "_COUNT_",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
-      },
-      "returntype" : "bigint"
+  "uuid": "5c44df30-daec-486e-af90-927bf7851057",
+  "name": "ssb",
+  "description": "",
+  "dimensions": [
+    {
+      "name": "SSB.PART_DERIVED",
+      "table": "SSB.PART",
+      "column": null,
+      "derived": [
+        "P_MFGR",
+        "P_CATEGORY",
+        "P_BRAND"
+      ]
+    },
+    {
+      "name": "C_CITY",
+      "table": "SSB.CUSTOMER",
+      "column": "C_CITY",
+      "derived": null
+    },
+    {
+      "name": "C_REGION",
+      "table": "SSB.CUSTOMER",
+      "column": "C_REGION",
+      "derived": null
+    },
+    {
+      "name": "C_NATION",
+      "table": "SSB.CUSTOMER",
+      "column": "C_NATION",
+      "derived": null
+    },
+    {
+      "name": "S_CITY",
+      "table": "SSB.SUPPLIER",
+      "column": "S_CITY",
+      "derived": null
+    },
+    {
+      "name": "S_REGION",
+      "table": "SSB.SUPPLIER",
+      "column": "S_REGION",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TOTAL_REVENUE",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "LO_REVENUE",
-        "next_parameter" : null
+    {
+      "name": "S_NATION",
+      "table": "SSB.SUPPLIER",
+      "column": "S_NATION",
+      "derived": null
+    },
+    {
+      "name": "D_YEAR",
+      "table": "SSB.DATES",
+      "column": "D_YEAR",
+      "derived": null
+    },
+    {
+      "name": "D_YEARMONTH",
+      "table": "SSB.DATES",
+      "column": "D_YEARMONTH",
+      "derived": null
+    },
+    {
+      "name": "D_YEARMONTHNUM",
+      "table": "SSB.DATES",
+      "column": "D_YEARMONTHNUM",
+      "derived": null
+    },
+    {
+      "name": "D_WEEKNUMINYEAR",
+      "table": "SSB.DATES",
+      "column": "D_WEEKNUMINYEAR",
+      "derived": null
+    }
+  ],
+  "measures": [
+    {
+      "name": "_COUNT_",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TOTAL_SUPPLYCOST",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "LO_SUPPLYCOST",
-        "next_parameter" : null
+    {
+      "name": "TOTAL_REVENUE",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "LO_REVENUE",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TOTAL_V_REVENUE",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "V_REVENUE",
-        "next_parameter" : null
+    {
+      "name": "TOTAL_SUPPLYCOST",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "LO_SUPPLYCOST",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  } ],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "LO_PARTKEY",
-      "encoding" : "dict"
-    }, {
-      "column" : "C_CITY",
-      "encoding" : "dict"
-    }, {
-      "column" : "C_REGION",
-      "encoding" : "dict"
-    }, {
-      "column" : "C_NATION",
-      "encoding" : "dict"
-    }, {
-      "column" : "S_CITY",
-      "encoding" : "dict"
-    }, {
-      "column" : "S_REGION",
-      "encoding" : "dict"
-    }, {
-      "column" : "S_NATION",
-      "encoding" : "dict"
-    }, {
-      "column" : "D_YEAR",
-      "encoding" : "dict"
-    }, {
-      "column" : "D_YEARMONTH",
-      "encoding" : "dict"
-    }, {
-      "column" : "D_YEARMONTHNUM",
-      "encoding" : "dict"
-    }, {
-      "column" : "D_WEEKNUMINYEAR",
-      "encoding" : "dict"
-    } ]
+    {
+      "name": "TOTAL_V_REVENUE",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "V_REVENUE",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "LO_PARTKEY",
+        "encoding": "dict"
+      },
+      {
+        "column": "C_CITY",
+        "encoding": "dict"
+      },
+      {
+        "column": "C_REGION",
+        "encoding": "dict"
+      },
+      {
+        "column": "C_NATION",
+        "encoding": "dict"
+      },
+      {
+        "column": "S_CITY",
+        "encoding": "dict"
+      },
+      {
+        "column": "S_REGION",
+        "encoding": "dict"
+      },
+      {
+        "column": "S_NATION",
+        "encoding": "dict"
+      },
+      {
+        "column": "D_YEAR",
+        "encoding": "dict"
+      },
+      {
+        "column": "D_YEARMONTH",
+        "encoding": "dict"
+      },
+      {
+        "column": "D_YEARMONTHNUM",
+        "encoding": "dict"
+      },
+      {
+        "column": "D_WEEKNUMINYEAR",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : "5iV8LVYs+PmVUju8QNQ5TQ==",
-  "last_modified" : 1457503036686,
-  "model_name" : "ssb",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "F1",
-      "columns" : [ {
-        "qualifier" : "M",
-        "measure_refs" : [ "_COUNT_", "TOTAL_REVENUE", "TOTAL_SUPPLYCOST", "TOTAL_V_REVENUE" ]
-      } ]
-    } ]
+  "signature": "5iV8LVYs+PmVUju8QNQ5TQ==",
+  "last_modified": 1457503036686,
+  "model_name": "ssb",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "F1",
+        "columns": [
+          {
+            "qualifier": "M",
+            "measure_refs": [
+              "_COUNT_",
+              "TOTAL_REVENUE",
+              "TOTAL_SUPPLYCOST",
+              "TOTAL_V_REVENUE"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "LO_PARTKEY", "C_CITY", "C_REGION", "C_NATION", "S_CITY", "S_REGION", "S_NATION", "D_YEAR", "D_YEARMONTH", "D_YEARMONTHNUM", "D_WEEKNUMINYEAR" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "C_REGION", "C_NATION", "C_CITY" ], [ "S_REGION", "S_NATION", "S_CITY" ], [ "D_YEARMONTH", "D_YEARMONTHNUM", "D_WEEKNUMINYEAR" ] ],
-      "mandatory_dims" : [ "D_YEAR" ],
-      "joint_dims" : [ ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "LO_PARTKEY",
+        "C_CITY",
+        "C_REGION",
+        "C_NATION",
+        "S_CITY",
+        "S_REGION",
+        "S_NATION",
+        "D_YEAR",
+        "D_YEARMONTH",
+        "D_YEARMONTHNUM",
+        "D_WEEKNUMINYEAR"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "C_REGION",
+            "C_NATION",
+            "C_CITY"
+          ],
+          [
+            "S_REGION",
+            "S_NATION",
+            "S_CITY"
+          ],
+          [
+            "D_YEARMONTH",
+            "D_YEARMONTHNUM",
+            "D_WEEKNUMINYEAR"
+          ]
+        ],
+        "mandatory_dims": [
+          "D_YEAR"
+        ],
+        "joint_dims": []
+      }
     }
-  } ],
-  "notify_list" : [ ],
-  "status_need_notify" : [ ],
-  "partition_date_start" : 694224000000,
-  "partition_date_end" : 3153600000000,
-  "auto_merge_time_ranges" : [ 604800000, 2419200000 ],
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
-  "override_kylin_properties" : {
-    "kylin.hbase.default.compression.codec" : "lz4",
-    "kylin.cube.aggrgroup.isMandatoryOnlyValid" : "true"
+  ],
+  "notify_list": [],
+  "status_need_notify": [],
+  "partition_date_start": 694224000000,
+  "partition_date_end": 3153600000000,
+  "auto_merge_time_ranges": [
+    604800000,
+    2419200000
+  ],
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
+  "override_kylin_properties": {
+    "kylin.hbase.default.compression.codec": "lz4",
+    "kylin.cube.aggrgroup.isMandatoryOnlyValid": "true"
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
index 4064fcb..f62d196 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_slr_desc.json
@@ -1,172 +1,245 @@
 {
-  "uuid" : "a24ca905-1fc6-4f67-985c-38fa5aeafd92",
- 
-  "name" : "test_kylin_cube_with_slr_desc",
-  "description" : null,
-  "dimensions" : [ {
-    "name" : "CAL_DT",
-    "table" : "EDW.TEST_CAL_DT",
-    "column" : "{FK}",
-    "derived" : [ "WEEK_BEG_DT" ]
-  }, {
-    "name" : "CATEGORY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "{FK}",
-    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "META_CATEG_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL2_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL3_NAME",
-    "derived" : null
-  }, {
-    "name" : "LSTG_FORMAT_NAME",
-    "table" : "DEFAULT.TEST_KYLIN_FACT",
-    "column" : "LSTG_FORMAT_NAME",
-    "derived" : null
-  }, {
-    "name" : "SITE_ID",
-    "table" : "EDW.TEST_SITES",
-    "column" : "{FK}",
-    "derived" : [ "SITE_NAME", "CRE_USER" ]
-  }, {
-    "name" : "SELLER_TYPE_CD",
-    "table" : "EDW.TEST_SELLER_TYPE_DIM",
-    "column" : "{FK}",
-    "derived" : [ "SELLER_TYPE_DESC" ]
-  }, {
-    "name" : "SELLER_ID",
-    "table" : "DEFAULT.TEST_KYLIN_FACT",
-    "column" : "SELLER_ID",
-    "derived" : null
-  } ],
-  "measures" : [ {
-    "name" : "GMV_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+  "uuid": "a24ca905-1fc6-4f67-985c-38fa5aeafd92",
+  "name": "test_kylin_cube_with_slr_desc",
+  "description": null,
+  "dimensions": [
+    {
+      "name": "CAL_DT",
+      "table": "EDW.TEST_CAL_DT",
+      "column": "{FK}",
+      "derived": [
+        "WEEK_BEG_DT"
+      ]
+    },
+    {
+      "name": "CATEGORY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "{FK}",
+      "derived": [
+        "USER_DEFINED_FIELD1",
+        "USER_DEFINED_FIELD3",
+        "UPD_DATE",
+        "UPD_USER"
+      ]
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "META_CATEG_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL2_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL3_NAME",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MIN",
-    "function" : {
-      "expression" : "MIN",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "LSTG_FORMAT_NAME",
+      "table": "DEFAULT.TEST_KYLIN_FACT",
+      "column": "LSTG_FORMAT_NAME",
+      "derived": null
+    },
+    {
+      "name": "SITE_ID",
+      "table": "EDW.TEST_SITES",
+      "column": "{FK}",
+      "derived": [
+        "SITE_NAME",
+        "CRE_USER"
+      ]
+    },
+    {
+      "name": "SELLER_TYPE_CD",
+      "table": "EDW.TEST_SELLER_TYPE_DIM",
+      "column": "{FK}",
+      "derived": [
+        "SELLER_TYPE_DESC"
+      ]
+    },
+    {
+      "name": "SELLER_ID",
+      "table": "DEFAULT.TEST_KYLIN_FACT",
+      "column": "SELLER_ID",
+      "derived": null
+    }
+  ],
+  "measures": [
+    {
+      "name": "GMV_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MAX",
-    "function" : {
-      "expression" : "MAX",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "GMV_MIN",
+      "function": {
+        "expression": "MIN",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TRANS_CNT",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
+    {
+      "name": "GMV_MAX",
+      "function": {
+        "expression": "MAX",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "ITEM_COUNT_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "ITEM_COUNT",
-        "next_parameter" : null
+    {
+      "name": "TRANS_CNT",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  } ],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "seller_id",
-      "encoding" : "int:4",
-      "isShardBy" : true
-    }, {
-      "column" : "cal_dt",
-      "encoding" : "dict"
-    }, {
-      "column" : "leaf_categ_id",
-      "encoding" : "fixed_length:18"
-    }, {
-      "column" : "meta_categ_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl2_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl3_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_format_name",
-      "encoding" : "fixed_length:12"
-    }, {
-      "column" : "lstg_site_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "slr_segment_cd",
-      "encoding" : "dict"
-    } ]
+    {
+      "name": "ITEM_COUNT_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "ITEM_COUNT",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "seller_id",
+        "encoding": "int:4",
+        "isShardBy": true
+      },
+      {
+        "column": "cal_dt",
+        "encoding": "dict"
+      },
+      {
+        "column": "leaf_categ_id",
+        "encoding": "fixed_length:18"
+      },
+      {
+        "column": "meta_categ_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl2_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl3_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_format_name",
+        "encoding": "fixed_length:12"
+      },
+      {
+        "column": "lstg_site_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "slr_segment_cd",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : null,
-  "last_modified" : 1448959801271,
-  "model_name" : "test_kylin_inner_join_model_desc",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "f1",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
-      } ]
-    } ]
+  "signature": null,
+  "last_modified": 1448959801271,
+  "model_name": "test_kylin_inner_join_model_desc",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "f1",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "gmv_sum",
+              "gmv_min",
+              "gmv_max",
+              "trans_cnt",
+              "item_count_sum"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name", "seller_id", "slr_segment_cd" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
-      "mandatory_dims" : ["seller_id"],
-      "joint_dims" : [ [ "lstg_format_name", "lstg_site_id", "slr_segment_cd" ] ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "lstg_format_name",
+        "lstg_site_id",
+        "meta_categ_name",
+        "seller_id",
+        "slr_segment_cd"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "META_CATEG_NAME",
+            "CATEG_LVL2_NAME",
+            "CATEG_LVL3_NAME"
+          ]
+        ],
+        "mandatory_dims": [
+          "seller_id"
+        ],
+        "joint_dims": [
+          [
+            "lstg_format_name",
+            "lstg_site_id",
+            "slr_segment_cd"
+          ]
+        ]
+      }
     }
-  } ],
-  "notify_list" : null,
-  "status_need_notify" : [ ],
-  "auto_merge_time_ranges" : null,
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
-  "partition_date_start" : 0
+  ],
+  "notify_list": null,
+  "status_need_notify": [],
+  "auto_merge_time_ranges": null,
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
+  "partition_date_start": 0
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
index d4c64b5..e3a3e70 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_inner_join_desc.json
@@ -1,169 +1,249 @@
 {
-  "uuid" : "9876b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name" : "test_kylin_cube_with_view_inner_join_desc",
-  "description" : null,
-  "dimensions" : [ {
-    "name" : "CAL_DT",
-    "table" : "EDW.V_TEST_CAL_DT",
-    "column" : "{FK}",
-    "derived" : [ "WEEK_BEG_DT" ]
-  }, {
-    "name" : "CATEGORY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "{FK}",
-    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "META_CATEG_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL2_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL3_NAME",
-    "derived" : null
-  }, {
-    "name" : "LSTG_FORMAT_NAME",
-    "table" : "DEFAULT.TEST_KYLIN_FACT",
-    "column" : "LSTG_FORMAT_NAME",
-    "derived" : null
-  }, {
-    "name" : "SITE_ID",
-    "table" : "EDW.TEST_SITES",
-    "column" : "{FK}",
-    "derived" : [ "SITE_NAME", "CRE_USER" ]
-  }, {
-    "name" : "SELLER_TYPE_CD",
-    "table" : "EDW.TEST_SELLER_TYPE_DIM",
-    "column" : "{FK}",
-    "derived" : [ "SELLER_TYPE_DESC" ]
-  } ],
-  "measures" : [ {
-    "name" : "GMV_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+  "uuid": "9876b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name": "test_kylin_cube_with_view_inner_join_desc",
+  "description": null,
+  "dimensions": [
+    {
+      "name": "CAL_DT",
+      "table": "EDW.V_TEST_CAL_DT",
+      "column": "{FK}",
+      "derived": [
+        "WEEK_BEG_DT"
+      ]
+    },
+    {
+      "name": "CATEGORY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "{FK}",
+      "derived": [
+        "USER_DEFINED_FIELD1",
+        "USER_DEFINED_FIELD3",
+        "UPD_DATE",
+        "UPD_USER"
+      ]
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "META_CATEG_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL2_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL3_NAME",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MIN",
-    "function" : {
-      "expression" : "MIN",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "LSTG_FORMAT_NAME",
+      "table": "DEFAULT.TEST_KYLIN_FACT",
+      "column": "LSTG_FORMAT_NAME",
+      "derived": null
+    },
+    {
+      "name": "SITE_ID",
+      "table": "EDW.TEST_SITES",
+      "column": "{FK}",
+      "derived": [
+        "SITE_NAME",
+        "CRE_USER"
+      ]
+    },
+    {
+      "name": "SELLER_TYPE_CD",
+      "table": "EDW.TEST_SELLER_TYPE_DIM",
+      "column": "{FK}",
+      "derived": [
+        "SELLER_TYPE_DESC"
+      ]
+    }
+  ],
+  "measures": [
+    {
+      "name": "GMV_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MAX",
-    "function" : {
-      "expression" : "MAX",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "GMV_MIN",
+      "function": {
+        "expression": "MIN",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TRANS_CNT",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
+    {
+      "name": "GMV_MAX",
+      "function": {
+        "expression": "MAX",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "ITEM_COUNT_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "ITEM_COUNT",
-        "next_parameter" : null
+    {
+      "name": "TRANS_CNT",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "cal_dt",
-      "encoding" : "dict"
-    }, {
-      "column" : "leaf_categ_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "meta_categ_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl2_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl3_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_format_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_site_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "slr_segment_cd",
-      "encoding" : "dict"
-    } ]
+    {
+      "name": "ITEM_COUNT_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "ITEM_COUNT",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "cal_dt",
+        "encoding": "dict"
+      },
+      {
+        "column": "leaf_categ_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "meta_categ_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl2_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl3_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_format_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_site_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "slr_segment_cd",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : null,
-  "last_modified" : 1448959801311,
-  "model_name" : "test_kylin_inner_join_view_model_desc",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "f1",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
-      } ]
-    }]
+  "signature": null,
+  "last_modified": 1448959801311,
+  "model_name": "test_kylin_inner_join_view_model_desc",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "f1",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "gmv_sum",
+              "gmv_min",
+              "gmv_max",
+              "trans_cnt",
+              "item_count_sum"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
-    "select_rule" : {
-      "hierarchy_dims" : [ ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
-    }
-  }, {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "lstg_format_name",
+        "lstg_site_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": [
+          [
+            "categ_lvl2_name",
+            "categ_lvl3_name",
+            "leaf_categ_id",
+            "meta_categ_name"
+          ]
+        ]
+      }
+    },
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "META_CATEG_NAME",
+            "CATEG_LVL2_NAME",
+            "CATEG_LVL3_NAME"
+          ]
+        ],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": []
+      }
     }
-  } ],
-  "notify_list" : null,
-  "status_need_notify" : [ ],
-  "auto_merge_time_ranges" : null,
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
+  ],
+  "notify_list": null,
+  "status_need_notify": [],
+  "auto_merge_time_ranges": null,
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
   "partition_date_start": 0
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
index 0388c0e..b17fbff 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_with_view_left_join_desc.json
@@ -1,169 +1,249 @@
 {
-  "uuid" : "6789b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name" : "test_kylin_cube_with_view_left_join_desc",
-  "description" : null,
-  "dimensions" : [ {
-    "name" : "CAL_DT",
-    "table" : "EDW.V_TEST_CAL_DT",
-    "column" : "{FK}",
-    "derived" : [ "WEEK_BEG_DT" ]
-  }, {
-    "name" : "CATEGORY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "{FK}",
-    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "META_CATEG_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL2_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL3_NAME",
-    "derived" : null
-  }, {
-    "name" : "LSTG_FORMAT_NAME",
-    "table" : "DEFAULT.TEST_KYLIN_FACT",
-    "column" : "LSTG_FORMAT_NAME",
-    "derived" : null
-  }, {
-    "name" : "SITE_ID",
-    "table" : "EDW.TEST_SITES",
-    "column" : "{FK}",
-    "derived" : [ "SITE_NAME", "CRE_USER" ]
-  }, {
-    "name" : "SELLER_TYPE_CD",
-    "table" : "EDW.TEST_SELLER_TYPE_DIM",
-    "column" : "{FK}",
-    "derived" : [ "SELLER_TYPE_DESC" ]
-  } ],
-  "measures" : [ {
-    "name" : "GMV_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+  "uuid": "6789b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name": "test_kylin_cube_with_view_left_join_desc",
+  "description": null,
+  "dimensions": [
+    {
+      "name": "CAL_DT",
+      "table": "EDW.V_TEST_CAL_DT",
+      "column": "{FK}",
+      "derived": [
+        "WEEK_BEG_DT"
+      ]
+    },
+    {
+      "name": "CATEGORY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "{FK}",
+      "derived": [
+        "USER_DEFINED_FIELD1",
+        "USER_DEFINED_FIELD3",
+        "UPD_DATE",
+        "UPD_USER"
+      ]
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "META_CATEG_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL2_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL3_NAME",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MIN",
-    "function" : {
-      "expression" : "MIN",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "LSTG_FORMAT_NAME",
+      "table": "DEFAULT.TEST_KYLIN_FACT",
+      "column": "LSTG_FORMAT_NAME",
+      "derived": null
+    },
+    {
+      "name": "SITE_ID",
+      "table": "EDW.TEST_SITES",
+      "column": "{FK}",
+      "derived": [
+        "SITE_NAME",
+        "CRE_USER"
+      ]
+    },
+    {
+      "name": "SELLER_TYPE_CD",
+      "table": "EDW.TEST_SELLER_TYPE_DIM",
+      "column": "{FK}",
+      "derived": [
+        "SELLER_TYPE_DESC"
+      ]
+    }
+  ],
+  "measures": [
+    {
+      "name": "GMV_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MAX",
-    "function" : {
-      "expression" : "MAX",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "GMV_MIN",
+      "function": {
+        "expression": "MIN",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "decimal(19,4)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TRANS_CNT",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
+    {
+      "name": "GMV_MAX",
+      "function": {
+        "expression": "MAX",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "ITEM_COUNT_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "ITEM_COUNT",
-        "next_parameter" : null
+    {
+      "name": "TRANS_CNT",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "cal_dt",
-      "encoding" : "dict"
-    }, {
-      "column" : "leaf_categ_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "meta_categ_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl2_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl3_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_format_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_site_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "slr_segment_cd",
-      "encoding" : "dict"
-    } ]
+    {
+      "name": "ITEM_COUNT_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "ITEM_COUNT",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "cal_dt",
+        "encoding": "dict"
+      },
+      {
+        "column": "leaf_categ_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "meta_categ_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl2_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl3_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_format_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_site_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "slr_segment_cd",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : null,
-  "last_modified" : 1448959801311,
-  "model_name" : "test_kylin_left_join_view_model_desc",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "f1",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum" ]
-      } ]
-    }]
+  "signature": null,
+  "last_modified": 1448959801311,
+  "model_name": "test_kylin_left_join_view_model_desc",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "f1",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "gmv_sum",
+              "gmv_min",
+              "gmv_max",
+              "trans_cnt",
+              "item_count_sum"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
-    "select_rule" : {
-      "hierarchy_dims" : [ ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
-    }
-  }, {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "lstg_format_name",
+        "lstg_site_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": [
+          [
+            "categ_lvl2_name",
+            "categ_lvl3_name",
+            "leaf_categ_id",
+            "meta_categ_name"
+          ]
+        ]
+      }
+    },
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "META_CATEG_NAME",
+            "CATEG_LVL2_NAME",
+            "CATEG_LVL3_NAME"
+          ]
+        ],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": []
+      }
     }
-  } ],
-  "notify_list" : null,
-  "status_need_notify" : [ ],
-  "auto_merge_time_ranges" : null,
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
+  ],
+  "notify_list": null,
+  "status_need_notify": [],
+  "auto_merge_time_ranges": null,
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
   "partition_date_start": 0
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
index 28328e4..7de2ae2 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
@@ -1,5 +1,4 @@
 {
- 
   "uuid": "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
   "name": "test_kylin_cube_without_slr_desc",
   "description": null,
@@ -160,54 +159,19 @@
         "returntype": "extendedcolumn(100)"
       },
       "dependent_measure_ref": null
-    }, {
-      "name" : "CAL_DT_RAW",
-      "function" : {
-        "expression" : "RAW",
-        "parameter" : {
-          "type" : "column",
-          "value" : "CAL_DT",
-          "next_parameter" : null
-        },
-        "returntype" : "raw"
-      },
-      "dependent_measure_ref" : null
-    }, {
-      "name" : "LSTG_FORMAT_NAME_RAW",
-      "function" : {
-        "expression" : "RAW",
-        "parameter" : {
-          "type" : "column",
-          "value" : "LSTG_FORMAT_NAME",
-          "next_parameter" : null
-        },
-        "returntype" : "raw"
-      },
-      "dependent_measure_ref" : null
-    }, {
-      "name" : "LEAF_CATEG_ID_RAW",
-      "function" : {
-        "expression" : "RAW",
-        "parameter" : {
-          "type" : "column",
-          "value" : "LEAF_CATEG_ID",
-          "next_parameter" : null
-        },
-        "returntype" : "raw"
-      },
-      "dependent_measure_ref" : null
-    }, {
-      "name" : "PRICE_RAW",
-      "function" : {
-        "expression" : "RAW",
-        "parameter" : {
-          "type" : "column",
-          "value" : "PRICE",
-          "next_parameter" : null
+    },
+    {
+      "name": "PRICE_RAW",
+      "function": {
+        "expression": "RAW",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
         },
-        "returntype" : "raw"
+        "returntype": "raw"
       },
-      "dependent_measure_ref" : null
+      "dependent_measure_ref": null
     }
   ],
   "rowkey": {

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
index ca1b35c..4270aab 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
@@ -1,293 +1,360 @@
 {
-  "uuid" : "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
-  "name" : "test_kylin_cube_without_slr_left_join_desc",
-  "description" : null,
-  "dimensions" : [ {
-    "name" : "CAL_DT",
-    "table" : "EDW.TEST_CAL_DT",
-    "column" : "{FK}",
-    "derived" : [ "WEEK_BEG_DT" ]
-  }, {
-    "name" : "CATEGORY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "{FK}",
-    "derived" : [ "USER_DEFINED_FIELD1", "USER_DEFINED_FIELD3", "UPD_DATE", "UPD_USER" ]
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "META_CATEG_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL2_NAME",
-    "derived" : null
-  }, {
-    "name" : "CATEGORY_HIERARCHY",
-    "table" : "DEFAULT.TEST_CATEGORY_GROUPINGS",
-    "column" : "CATEG_LVL3_NAME",
-    "derived" : null
-  }, {
-    "name" : "LSTG_FORMAT_NAME",
-    "table" : "DEFAULT.TEST_KYLIN_FACT",
-    "column" : "LSTG_FORMAT_NAME",
-    "derived" : null
-  }, {
-    "name" : "SITE_ID",
-    "table" : "EDW.TEST_SITES",
-    "column" : "{FK}",
-    "derived" : [ "SITE_NAME", "CRE_USER" ]
-  }, {
-    "name" : "SELLER_TYPE_CD",
-    "table" : "EDW.TEST_SELLER_TYPE_DIM",
-    "column" : "{FK}",
-    "derived" : [ "SELLER_TYPE_DESC" ]
-  } ],
-  "measures" : [ {
-    "name" : "GMV_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+  "uuid": "9ac9b7a8-3929-4dff-b59d-2100aadc8dbf",
+  "name": "test_kylin_cube_without_slr_left_join_desc",
+  "description": null,
+  "dimensions": [
+    {
+      "name": "CAL_DT",
+      "table": "EDW.TEST_CAL_DT",
+      "column": "{FK}",
+      "derived": [
+        "WEEK_BEG_DT"
+      ]
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MIN",
-    "function" : {
-      "expression" : "MIN",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+    {
+      "name": "CATEGORY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "{FK}",
+      "derived": [
+        "USER_DEFINED_FIELD1",
+        "USER_DEFINED_FIELD3",
+        "UPD_DATE",
+        "UPD_USER"
+      ]
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_MAX",
-    "function" : {
-      "expression" : "MAX",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
-      },
-      "returntype" : "decimal(19,4)"
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "META_CATEG_NAME",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TRANS_CNT",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
-      },
-      "returntype" : "bigint"
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL2_NAME",
+      "derived": null
+    },
+    {
+      "name": "CATEGORY_HIERARCHY",
+      "table": "DEFAULT.TEST_CATEGORY_GROUPINGS",
+      "column": "CATEG_LVL3_NAME",
+      "derived": null
+    },
+    {
+      "name": "LSTG_FORMAT_NAME",
+      "table": "DEFAULT.TEST_KYLIN_FACT",
+      "column": "LSTG_FORMAT_NAME",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "ITEM_COUNT_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "ITEM_COUNT",
-        "next_parameter" : null
+    {
+      "name": "SITE_ID",
+      "table": "EDW.TEST_SITES",
+      "column": "{FK}",
+      "derived": [
+        "SITE_NAME",
+        "CRE_USER"
+      ]
+    },
+    {
+      "name": "SELLER_TYPE_CD",
+      "table": "EDW.TEST_SELLER_TYPE_DIM",
+      "column": "{FK}",
+      "derived": [
+        "SELLER_TYPE_DESC"
+      ]
+    }
+  ],
+  "measures": [
+    {
+      "name": "GMV_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "SELLER_CNT_BITMAP",
-    "function" : {
-      "expression" : "COUNT_DISTINCT",
-      "parameter" : {
-        "type" : "column",
-        "value" : "SELLER_ID",
-        "next_parameter" : null
+    {
+      "name": "GMV_MIN",
+      "function": {
+        "expression": "MIN",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bitmap"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "SITE_NAME_BITMAP",
-    "function" : {
-      "expression" : "COUNT_DISTINCT",
-      "parameter" : {
-        "type" : "column",
-        "value" : "SITE_NAME",
-        "next_parameter" : null
+    {
+      "name": "GMV_MAX",
+      "function": {
+        "expression": "MAX",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,4)"
       },
-      "returntype" : "bitmap"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "SELLER_FORMAT_CNT",
-    "function" : {
-      "expression" : "COUNT_DISTINCT",
-      "parameter" : {
-        "type" : "column",
-        "value" : "LSTG_FORMAT_NAME",
-        "next_parameter" : {
-          "type" : "column",
-          "value" : "SELLER_ID",
-          "next_parameter" : null
-        }
+    {
+      "name": "TRANS_CNT",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "hllc(10)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "TOP_SELLER",
-    "function" : {
-      "expression" : "TOP_N",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : {
-          "type" : "column",
-          "value" : "SELLER_ID",
-          "next_parameter" : null
-        }
+    {
+      "name": "ITEM_COUNT_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "ITEM_COUNT",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "topn(100)",
-      "configuration": {"topn.encoding.SELLER_ID" : "int:4"}
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "CAL_DT_RAW",
-    "function" : {
-      "expression" : "RAW",
-      "parameter" : {
-        "type" : "column",
-        "value" : "CAL_DT",
-        "next_parameter" : null
+    {
+      "name": "SELLER_CNT_BITMAP",
+      "function": {
+        "expression": "COUNT_DISTINCT",
+        "parameter": {
+          "type": "column",
+          "value": "SELLER_ID",
+          "next_parameter": null
+        },
+        "returntype": "bitmap"
       },
-      "returntype" : "raw"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "LSTG_FORMAT_NAME_RAW",
-    "function" : {
-      "expression" : "RAW",
-      "parameter" : {
-        "type" : "column",
-        "value" : "LSTG_FORMAT_NAME",
-        "next_parameter" : null
+    {
+      "name": "SITE_NAME_BITMAP",
+      "function": {
+        "expression": "COUNT_DISTINCT",
+        "parameter": {
+          "type": "column",
+          "value": "SITE_NAME",
+          "next_parameter": null
+        },
+        "returntype": "bitmap"
       },
-      "returntype" : "raw"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "LEAF_CATEG_ID_RAW",
-    "function" : {
-      "expression" : "RAW",
-      "parameter" : {
-        "type" : "column",
-        "value" : "LEAF_CATEG_ID",
-        "next_parameter" : null
+    {
+      "name": "SELLER_FORMAT_CNT",
+      "function": {
+        "expression": "COUNT_DISTINCT",
+        "parameter": {
+          "type": "column",
+          "value": "LSTG_FORMAT_NAME",
+          "next_parameter": {
+            "type": "column",
+            "value": "SELLER_ID",
+            "next_parameter": null
+          }
+        },
+        "returntype": "hllc(10)"
       },
-      "returntype" : "raw"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "PRICE_RAW",
-    "function" : {
-      "expression" : "RAW",
-      "parameter" : {
-        "type" : "column",
-        "value" : "PRICE",
-        "next_parameter" : null
+    {
+      "name": "TOP_SELLER",
+      "function": {
+        "expression": "TOP_N",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": {
+            "type": "column",
+            "value": "SELLER_ID",
+            "next_parameter": null
+          }
+        },
+        "returntype": "topn(100)",
+        "configuration": {
+          "topn.encoding.SELLER_ID": "int:4"
+        }
       },
-      "returntype" : "raw"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  } ],
-  "dictionaries" : [
     {
-      "column" : "SITE_NAME",
+      "name": "PRICE_RAW",
+      "function": {
+        "expression": "RAW",
+        "parameter": {
+          "type": "column",
+          "value": "PRICE",
+          "next_parameter": null
+        },
+        "returntype": "raw"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "dictionaries": [
+    {
+      "column": "SITE_NAME",
       "builder": "org.apache.kylin.dict.GlobalDictionaryBuilder"
     }
   ],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "cal_dt",
-      "encoding" : "dict"
-    }, {
-      "column" : "leaf_categ_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "meta_categ_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl2_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "categ_lvl3_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_format_name",
-      "encoding" : "dict"
-    }, {
-      "column" : "lstg_site_id",
-      "encoding" : "dict"
-    }, {
-      "column" : "slr_segment_cd",
-      "encoding" : "dict"
-    } ]
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "cal_dt",
+        "encoding": "dict"
+      },
+      {
+        "column": "leaf_categ_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "meta_categ_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl2_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "categ_lvl3_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_format_name",
+        "encoding": "dict"
+      },
+      {
+        "column": "lstg_site_id",
+        "encoding": "dict"
+      },
+      {
+        "column": "slr_segment_cd",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : null,
-  "last_modified" : 1448959801311,
-  "model_name" : "test_kylin_left_join_model_desc",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "f1",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "gmv_sum", "gmv_min", "gmv_max", "trans_cnt", "item_count_sum", "CAL_DT_RAW", "LSTG_FORMAT_NAME_RAW", "LEAF_CATEG_ID_RAW", "PRICE_RAW" ]
-      } ]
-    }, {
-      "name" : "f2",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "seller_cnt_bitmap", "site_name_bitmap", "seller_format_cnt"]
-      } ]
-    }, {
-      "name" : "f3",
-      "columns" : [ {
-        "qualifier" : "m",
-        "measure_refs" : [ "top_seller" ]
-      } ]
-    } ]
+  "signature": null,
+  "last_modified": 1448959801311,
+  "model_name": "test_kylin_left_join_model_desc",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "f1",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "gmv_sum",
+              "gmv_min",
+              "gmv_max",
+              "trans_cnt",
+              "item_count_sum",
+              "CAL_DT_RAW",
+              "LSTG_FORMAT_NAME_RAW",
+              "LEAF_CATEG_ID_RAW",
+              "PRICE_RAW"
+            ]
+          }
+        ]
+      },
+      {
+        "name": "f2",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "seller_cnt_bitmap",
+              "site_name_bitmap",
+              "seller_format_cnt"
+            ]
+          }
+        ]
+      },
+      {
+        "name": "f3",
+        "columns": [
+          {
+            "qualifier": "m",
+            "measure_refs": [
+              "top_seller"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "lstg_format_name", "lstg_site_id", "meta_categ_name"],
-    "select_rule" : {
-      "hierarchy_dims" : [ ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ [ "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ] ]
-    }
-  }, {
-    "includes" : [ "cal_dt", "categ_lvl2_name", "categ_lvl3_name", "leaf_categ_id", "meta_categ_name" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" ] ],
-      "mandatory_dims" : [ "cal_dt" ],
-      "joint_dims" : [ ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "lstg_format_name",
+        "lstg_site_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": [
+          [
+            "categ_lvl2_name",
+            "categ_lvl3_name",
+            "leaf_categ_id",
+            "meta_categ_name"
+          ]
+        ]
+      }
+    },
+    {
+      "includes": [
+        "cal_dt",
+        "categ_lvl2_name",
+        "categ_lvl3_name",
+        "leaf_categ_id",
+        "meta_categ_name"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "META_CATEG_NAME",
+            "CATEG_LVL2_NAME",
+            "CATEG_LVL3_NAME"
+          ]
+        ],
+        "mandatory_dims": [
+          "cal_dt"
+        ],
+        "joint_dims": []
+      }
     }
-  } ],
-  "notify_list" : null,
-  "status_need_notify" : [ ],
-  "auto_merge_time_ranges" : null,
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
+  ],
+  "notify_list": null,
+  "status_need_notify": [],
+  "auto_merge_time_ranges": null,
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
   "override_kylin_properties": {
     "kylin.job.cubing.inmem.sampling.hll.precision": "16"
   },

http://git-wip-us.apache.org/repos/asf/kylin/blob/618cf28c/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
index ef10c1e..f2c4e72 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
@@ -1,118 +1,155 @@
 {
-  "uuid" : "901ed15e-7769-4c66-b7ae-fbdc971cd192",
- 
-  "name" : "test_streaming_table_cube_desc",
-  "description" : "",
-  "dimensions" : [ {
-    "name" : "DEFAULT.STREAMING_TABLE.SITE",
-    "table" : "DEFAULT.STREAMING_TABLE",
-    "column" : "SITE",
-    "derived" : null
-  }, {
-    "name" : "DEFAULT.STREAMING_TABLE.ITM",
-    "table" : "DEFAULT.STREAMING_TABLE",
-    "column" : "ITM",
-    "derived" : null
-  }, {
-    "name" : "TIME",
-    "table" : "DEFAULT.STREAMING_TABLE",
-    "column" : "DAY_START",
-    "derived" : null
-  }, {
-    "name" : "TIME",
-    "table" : "DEFAULT.STREAMING_TABLE",
-    "column" : "HOUR_START",
-    "derived" : null
-  }, {
-    "name" : "TIME",
-    "table" : "DEFAULT.STREAMING_TABLE",
-    "column" : "MINUTE_START",
-    "derived" : null
-  } ],
-  "measures" : [ {
-    "name" : "_COUNT_",
-    "function" : {
-      "expression" : "COUNT",
-      "parameter" : {
-        "type" : "constant",
-        "value" : "1",
-        "next_parameter" : null
-      },
-      "returntype" : "bigint"
+  "uuid": "901ed15e-7769-4c66-b7ae-fbdc971cd192",
+  "name": "test_streaming_table_cube_desc",
+  "description": "",
+  "dimensions": [
+    {
+      "name": "DEFAULT.STREAMING_TABLE.SITE",
+      "table": "DEFAULT.STREAMING_TABLE",
+      "column": "SITE",
+      "derived": null
+    },
+    {
+      "name": "DEFAULT.STREAMING_TABLE.ITM",
+      "table": "DEFAULT.STREAMING_TABLE",
+      "column": "ITM",
+      "derived": null
+    },
+    {
+      "name": "TIME",
+      "table": "DEFAULT.STREAMING_TABLE",
+      "column": "DAY_START",
+      "derived": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "GMV_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "GMV",
-        "next_parameter" : null
+    {
+      "name": "TIME",
+      "table": "DEFAULT.STREAMING_TABLE",
+      "column": "HOUR_START",
+      "derived": null
+    },
+    {
+      "name": "TIME",
+      "table": "DEFAULT.STREAMING_TABLE",
+      "column": "MINUTE_START",
+      "derived": null
+    }
+  ],
+  "measures": [
+    {
+      "name": "_COUNT_",
+      "function": {
+        "expression": "COUNT",
+        "parameter": {
+          "type": "constant",
+          "value": "1",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
       },
-      "returntype" : "decimal(19,6)"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  }, {
-    "name" : "ITEM_COUNT_SUM",
-    "function" : {
-      "expression" : "SUM",
-      "parameter" : {
-        "type" : "column",
-        "value" : "ITEM_COUNT",
-        "next_parameter" : null
+    {
+      "name": "GMV_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "GMV",
+          "next_parameter": null
+        },
+        "returntype": "decimal(19,6)"
       },
-      "returntype" : "bigint"
+      "dependent_measure_ref": null
     },
-    "dependent_measure_ref" : null
-  } ],
-  "rowkey" : {
-    "rowkey_columns" : [ {
-      "column" : "DAY_START",
-      "encoding" : "dict"
-    }, {
-      "column" : "HOUR_START",
-      "encoding" : "dict"
-    }, {
-      "column" : "MINUTE_START",
-      "encoding" : "dict"
-    }, {
-      "column" : "SITE",
-      "encoding" : "dict"
-    }, {
-      "column" : "ITM",
-      "encoding" : "dict"
-    } ]
+    {
+      "name": "ITEM_COUNT_SUM",
+      "function": {
+        "expression": "SUM",
+        "parameter": {
+          "type": "column",
+          "value": "ITEM_COUNT",
+          "next_parameter": null
+        },
+        "returntype": "bigint"
+      },
+      "dependent_measure_ref": null
+    }
+  ],
+  "rowkey": {
+    "rowkey_columns": [
+      {
+        "column": "DAY_START",
+        "encoding": "dict"
+      },
+      {
+        "column": "HOUR_START",
+        "encoding": "dict"
+      },
+      {
+        "column": "MINUTE_START",
+        "encoding": "dict"
+      },
+      {
+        "column": "SITE",
+        "encoding": "dict"
+      },
+      {
+        "column": "ITM",
+        "encoding": "dict"
+      }
+    ]
   },
-  "signature" : null,
-  "last_modified" : 1448959801314,
-  "model_name" : "test_streaming_table_model_desc",
-  "null_string" : null,
-  "hbase_mapping" : {
-    "column_family" : [ {
-      "name" : "F1",
-      "columns" : [ {
-        "qualifier" : "M",
-        "measure_refs" : [ "_COUNT_", "GMV_SUM", "ITEM_COUNT_SUM" ]
-      } ]
-    } ]
+  "signature": null,
+  "last_modified": 1448959801314,
+  "model_name": "test_streaming_table_model_desc",
+  "null_string": null,
+  "hbase_mapping": {
+    "column_family": [
+      {
+        "name": "F1",
+        "columns": [
+          {
+            "qualifier": "M",
+            "measure_refs": [
+              "_COUNT_",
+              "GMV_SUM",
+              "ITEM_COUNT_SUM"
+            ]
+          }
+        ]
+      }
+    ]
   },
-  "aggregation_groups" : [ {
-    "includes" : [ "DAY_START", "HOUR_START", "ITM", "MINUTE_START", "SITE" ],
-    "select_rule" : {
-      "hierarchy_dims" : [ [ "DAY_START", "HOUR_START", "MINUTE_START" ] ],
-      "mandatory_dims" : [ ],
-      "joint_dims" : [ ]
+  "aggregation_groups": [
+    {
+      "includes": [
+        "DAY_START",
+        "HOUR_START",
+        "ITM",
+        "MINUTE_START",
+        "SITE"
+      ],
+      "select_rule": {
+        "hierarchy_dims": [
+          [
+            "DAY_START",
+            "HOUR_START",
+            "MINUTE_START"
+          ]
+        ],
+        "mandatory_dims": [],
+        "joint_dims": []
+      }
     }
-  } ],
+  ],
   "override_kylin_properties": {
     "kylin.cube.algorithm": "inmem"
   },
-  "notify_list" : [ ],
-  "status_need_notify" : [ ],
-  "auto_merge_time_ranges" : null,
-  "retention_range" : 0,
-  "engine_type" : 2,
-  "storage_type" : 2,
+  "notify_list": [],
+  "status_need_notify": [],
+  "auto_merge_time_ranges": null,
+  "retention_range": 0,
+  "engine_type": 2,
+  "storage_type": 2,
   "partition_date_start": 0
 }
\ No newline at end of file


[28/50] [abbrv] kylin git commit: minor refactors on StorageSideBehavior

Posted by sh...@apache.org.
minor refactors on StorageSideBehavior


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a201c5b0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a201c5b0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a201c5b0

Branch: refs/heads/KYLIN-1726
Commit: a201c5b0f8e7706ef2cf7cbf9b6d43d3a6bc4a57
Parents: 6db4b17
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon Sep 12 13:41:05 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Sep 12 23:53:48 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/gridtable/GTScanRequest.java   | 10 +++++-----
 .../kylin/gridtable/StorageSideBehavior.java    | 14 +++++++++++++-
 .../apache/kylin/query/ITKylinQueryTest.java    | 20 ++++++++++++++++++--
 .../observer/AggregationScanner.java            |  6 +++---
 .../hbase/cube/v2/HBaseReadonlyStore.java       |  4 ++++
 .../coprocessor/endpoint/CubeVisitService.java  |  7 ++-----
 6 files changed, 45 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 3e57e86..4f68806 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -158,14 +158,14 @@ public class GTScanRequest {
     }
 
     /**
-     * doFilter,doAggr,doMemCheck are only for profiling use.
+     * filterToggledOn,aggrToggledOn are only for profiling/test use.
      * in normal cases they are all true.
-     * <p/>
+     * 
      * Refer to CoprocessorBehavior for explanation
      */
-    public IGTScanner decorateScanner(IGTScanner scanner, boolean doFilter, boolean doAggr, long deadline) throws IOException {
+    public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn, long deadline) throws IOException {
         IGTScanner result = scanner;
-        if (!doFilter) { //Skip reading this section if you're not profiling! 
+        if (!filterToggledOn) { //Skip reading this section if you're not profiling! 
             int scanned = lookAndForget(result);
             return new EmptyGTScanner(scanned);
         } else {
@@ -174,7 +174,7 @@ public class GTScanRequest {
                 result = new GTFilterScanner(result, this);
             }
 
-            if (!doAggr) {//Skip reading this section if you're not profiling! 
+            if (!aggrToggledOn) {//Skip reading this section if you're not profiling! 
                 long scanned = result.getScannedRowCount();
                 lookAndForget(result);
                 return new EmptyGTScanner(scanned);

http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
index 7fa93e7..b01ac3f 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
@@ -26,5 +26,17 @@ public enum StorageSideBehavior {
     SCAN_FILTER, //only scan+filter used,used for profiling filter speed.  Will not return any result
     SCAN_FILTER_AGGR, //aggregate the result.  Will return results
     SCAN_FILTER_AGGR_CHECKMEM, //default full operations. Will return results
-    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY, // on each scan operation, delay for 10s to simulate slow queries, for test use
+    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY; // on each scan operation, delay for 10s to simulate slow queries, for test use
+
+    public boolean filterToggledOn() {
+        return this.ordinal() >= SCAN_FILTER.ordinal();
+    }
+
+    public boolean aggrToggledOn() {
+        return this.ordinal() >= SCAN_FILTER_AGGR.ordinal();
+    }
+
+    public boolean delayToggledOn() {
+        return this.ordinal() >= SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.ordinal();
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index de68c7a..c1c9767 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -129,10 +129,21 @@ public class ITKylinQueryTest extends KylinTestBase {
         thrown.expect(SQLException.class);
 
         //should not break at table duplicate check, should fail at model duplicate check
-        thrown.expectCause(new BaseMatcher<Throwable>() {
+        thrown.expect(new BaseMatcher<Throwable>() {
             @Override
             public boolean matches(Object item) {
-                if (item instanceof GTScanSelfTerminatedException) {
+
+                //find the "root"
+                Throwable throwable = (Throwable) item;
+                while (true) {
+                    if (throwable.getCause() != null) {
+                        throwable = throwable.getCause();
+                    } else {
+                        break;
+                    }
+                }
+
+                if (throwable instanceof GTScanSelfTerminatedException) {
                     return true;
                 }
                 return false;
@@ -143,6 +154,11 @@ public class ITKylinQueryTest extends KylinTestBase {
             }
         });
 
+        runTimetoutQueries();
+
+    }
+
+    protected void runTimetoutQueries() throws Exception {
         try {
 
             Map<String, String> toggles = Maps.newHashMap();
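
The anonymous matcher in the hunk above walks the SQLException's cause chain down to its deepest cause before testing for GTScanSelfTerminatedException. For readers following the diff, here is the same cause-chain walk factored into a standalone helper; this is an illustrative sketch only, the class and method names are made up and are not part of the commit:

    // Illustrative helper (not in the Kylin code base): walk a Throwable's
    // cause chain to its deepest cause, as the matcher above does inline.
    public final class ThrowableUtil {
        private ThrowableUtil() {
        }

        public static Throwable rootCause(Throwable t) {
            Throwable current = t;
            while (current.getCause() != null) {
                current = current.getCause();
            }
            return current;
        }

        public static void main(String[] args) {
            Throwable wrapped = new RuntimeException("outer",
                    new IllegalStateException("middle", new java.io.IOException("root")));
            // prints "java.io.IOException: root"
            System.out.println(rootCause(wrapped));
        }
    }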

http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
index a77f988..a900ea1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
@@ -25,9 +25,9 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.measure.MeasureAggregator;
 import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
-import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
@@ -86,11 +86,11 @@ public class AggregationScanner implements RegionScanner {
                     meaninglessByte += cell.getRowArray()[i];
                 }
             } else {
-                if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER.ordinal()) {
+                if (behavior.filterToggledOn()) {
                     if (filter != null && filter.evaluate(tuple) == false)
                         continue;
 
-                    if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR.ordinal()) {
+                    if (behavior.aggrToggledOn()) {
                         AggrKey aggKey = projector.getAggrKey(results);
                         MeasureAggregator[] bufs = aggCache.getBuffer(aggKey);
                         aggregators.aggregate(bufs, results);

http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
index 1d8ad79..4c02dff 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
@@ -45,6 +45,10 @@ public class HBaseReadonlyStore implements IGTStore {
     private int rowkeyPreambleSize;
     private boolean withDelay = false;
 
+
+    /**
+     * @param withDelay is for test use
+     */
     public HBaseReadonlyStore(CellListIterator cellListIterator, GTScanRequest gtScanRequest, List<Pair<byte[], byte[]>> hbaseColumns, List<List<Integer>> hbaseColumnsToGT, int rowkeyPreambleSize, boolean withDelay) {
         this.cellListIterator = cellListIterator;
         this.info = gtScanRequest.getInfo();

http://git-wip-us.apache.org/repos/asf/kylin/blob/a201c5b0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 36adca1..ffe41c5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -284,13 +284,10 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                 }
             };
 
-            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, //
-                    request.getRowkeyPreambleSize(), StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString().equals(scanReq.getStorageBehavior()));
+            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, request.getRowkeyPreambleSize(), behavior.delayToggledOn());
 
             IGTScanner rawScanner = store.scan(scanReq);
-            IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, //
-                    behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER.ordinal(), //
-                    behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR.ordinal(), deadline);
+            IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, behavior.filterToggledOn(), behavior.aggrToggledOn(), deadline);
 
             ByteBuffer buffer = ByteBuffer.allocate(BufferedMeasureEncoder.DEFAULT_BUFFER_SIZE);
 

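The hunks above swap raw ordinal comparisons for intention-revealing helpers on StorageSideBehavior (filterToggledOn and aggrToggledOn in AggregationScanner, delayToggledOn in CubeVisitService). The enum source itself is not part of this mail; the following is only a minimal sketch of how such helpers could look, assuming the constant list mirrors the CoprocessorBehavior enum shown further below (the leading RAW_SCAN and SCAN constants are an assumption):

public enum StorageSideBehavior {
    RAW_SCAN,                             // assumed leading constant, not shown in this mail
    SCAN,                                 // assumed leading constant, not shown in this mail
    SCAN_FILTER,                          // scan + filter only
    SCAN_FILTER_AGGR,                     // also aggregate the result
    SCAN_FILTER_AGGR_CHECKMEM,            // full operations (default)
    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY;  // adds an artificial per-row delay, test use only

    public boolean filterToggledOn() {
        return this.ordinal() >= SCAN_FILTER.ordinal(); // filtering applies from SCAN_FILTER onwards
    }

    public boolean aggrToggledOn() {
        return this.ordinal() >= SCAN_FILTER_AGGR.ordinal(); // aggregation applies from SCAN_FILTER_AGGR onwards
    }

    public boolean delayToggledOn() {
        return this == SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY; // only the WITHDELAY behavior sleeps per scanned row
    }
}
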

[06/50] [abbrv] kylin git commit: KYLIN-1996 Keep original column order when designing cube

Posted by sh...@apache.org.
KYLIN-1996 Keep original column order when designing cube

Signed-off-by: Jason <ji...@163.com>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e87c816d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e87c816d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e87c816d

Branch: refs/heads/KYLIN-1726
Commit: e87c816dd7099290220ca09361f9ca04c36a317e
Parents: bf26114
Author: chenzhx <34...@qq.com>
Authored: Fri Sep 9 10:55:36 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Fri Sep 9 15:52:48 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/cubeDimensions.js | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e87c816d/webapp/app/js/controllers/cubeDimensions.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeDimensions.js b/webapp/app/js/controllers/cubeDimensions.js
index 1663b5c..ab07451 100644
--- a/webapp/app/js/controllers/cubeDimensions.js
+++ b/webapp/app/js/controllers/cubeDimensions.js
@@ -67,20 +67,17 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
         var cols = $scope.getDimColumnsByTable(factTable);
 
         // Initialize selected available.
-        var factAvailable = {};
         var factSelectAvailable = {};
 
         for (var i = 0; i < cols.length; i++) {
             cols[i].table = factTable;
             cols[i].isLookup = false;
 
-            factAvailable[cols[i].name] = cols[i];
-
             // Default not selected and not disabled.
             factSelectAvailable[cols[i].name] = {selected: false, disabled: false};
         }
 
-        $scope.availableColumns[factTable] = factAvailable;
+        $scope.availableColumns[factTable] = cols;
         $scope.selectedColumns[factTable] = factSelectAvailable;
         $scope.availableTables.push(factTable);
 
@@ -91,20 +88,17 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
             var cols2 = $scope.getDimColumnsByTable(lookups[j].table);
 
             // Initialize selected available.
-            var lookupAvailable = {};
             var lookupSelectAvailable = {};
 
             for (var k = 0; k < cols2.length; k++) {
                 cols2[k].table = lookups[j].table;
                 cols2[k].isLookup = true;
 
-                lookupAvailable[cols2[k].name] = cols2[k];
-
                 // Default not selected and not disabled.
                 lookupSelectAvailable[cols2[k].name] = {selected: false, disabled: false};
             }
 
-            $scope.availableColumns[lookups[j].table] = lookupAvailable;
+            $scope.availableColumns[lookups[j].table] = cols2;
             $scope.selectedColumns[lookups[j].table] = lookupSelectAvailable;
             if($scope.availableTables.indexOf(lookups[j].table)==-1){
                 $scope.availableTables.push(lookups[j].table);


[03/50] [abbrv] kylin git commit: KYLIN-2003 error start time

Posted by sh...@apache.org.
KYLIN-2003 error start time


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ded3b585
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ded3b585
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ded3b585

Branch: refs/heads/KYLIN-1726
Commit: ded3b58583e76ab1234ac48459c264196c98f04b
Parents: d680169
Author: Jason <ji...@163.com>
Authored: Thu Sep 8 17:47:26 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Thu Sep 8 17:47:26 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/filters/filter.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ded3b585/webapp/app/js/filters/filter.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/filters/filter.js b/webapp/app/js/filters/filter.js
index 4e8d210..f9f7165 100755
--- a/webapp/app/js/filters/filter.js
+++ b/webapp/app/js/filters/filter.js
@@ -152,16 +152,16 @@ KylinApp
       var convertedMillis = item;
       if (gmttimezone.indexOf("GMT+") != -1) {
         var offset = gmttimezone.substr(4, 1);
-        convertedMillis = item + offset * 60 * 60000 + localOffset * 60000;
+        convertedMillis = new Date(item).getTime() + offset * 60 * 60000 + localOffset * 60000;
       }
       else if (gmttimezone.indexOf("GMT-") != -1) {
         var offset = gmttimezone.substr(4, 1);
-        convertedMillis = item - offset * 60 * 60000 + localOffset * 60000;
+        convertedMillis = new Date(item).getTime() - offset * 60 * 60000 + localOffset * 60000;
       }
       else {
         // return PST by default
         timezone = "PST";
-        convertedMillis = item - 8 * 60 * 60000 + localOffset * 60000;
+        convertedMillis = new Date(item).getTime() - 8 * 60 * 60000 + localOffset * 60000;
       }
       return $filter('date')(convertedMillis, format) + " " + timezone;
 


[07/50] [abbrv] kylin git commit: KYLIN-1922 optimize needStorageAggregation check logic and make sure self-termination in coprocessor works

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
new file mode 100644
index 0000000..0cdfa7e
--- /dev/null
+++ b/core-storage/src/test/java/org/apache/kylin/storage/gtrecord/DictGridTableTest.java
@@ -0,0 +1,626 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.storage.gtrecord;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.List;
+
+import org.apache.kylin.common.util.ByteArray;
+import org.apache.kylin.common.util.BytesSerializer;
+import org.apache.kylin.common.util.Dictionary;
+import org.apache.kylin.common.util.ImmutableBitSet;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.gridtable.CubeCodeSystem;
+import org.apache.kylin.dict.NumberDictionaryBuilder;
+import org.apache.kylin.dict.StringBytesConverter;
+import org.apache.kylin.dict.TrieDictionaryBuilder;
+import org.apache.kylin.dimension.DictionaryDimEnc;
+import org.apache.kylin.dimension.DimensionEncoding;
+import org.apache.kylin.gridtable.GTBuilder;
+import org.apache.kylin.gridtable.GTInfo;
+import org.apache.kylin.gridtable.GTRecord;
+import org.apache.kylin.gridtable.GTScanRange;
+import org.apache.kylin.gridtable.GTScanRequest;
+import org.apache.kylin.gridtable.GTScanRequestBuilder;
+import org.apache.kylin.gridtable.GTUtil;
+import org.apache.kylin.gridtable.GridTable;
+import org.apache.kylin.gridtable.IGTScanner;
+import org.apache.kylin.gridtable.GTFilterScanner.FilterResultCache;
+import org.apache.kylin.gridtable.GTInfo.Builder;
+import org.apache.kylin.gridtable.memstore.GTSimpleMemStore;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.datatype.LongMutable;
+import org.apache.kylin.metadata.filter.ColumnTupleFilter;
+import org.apache.kylin.metadata.filter.CompareTupleFilter;
+import org.apache.kylin.metadata.filter.ConstantTupleFilter;
+import org.apache.kylin.metadata.filter.ExtractTupleFilter;
+import org.apache.kylin.metadata.filter.LogicalTupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class DictGridTableTest extends LocalFileMetadataTestCase {
+
+    private GridTable table;
+    private GTInfo info;
+    private CompareTupleFilter timeComp0;
+    private CompareTupleFilter timeComp1;
+    private CompareTupleFilter timeComp2;
+    private CompareTupleFilter timeComp3;
+    private CompareTupleFilter timeComp4;
+    private CompareTupleFilter timeComp5;
+    private CompareTupleFilter timeComp6;
+    private CompareTupleFilter ageComp1;
+    private CompareTupleFilter ageComp2;
+    private CompareTupleFilter ageComp3;
+    private CompareTupleFilter ageComp4;
+
+    @After
+    public void after() throws Exception {
+
+        this.cleanupTestMetadata();
+    }
+
+    @Before
+    public void setup() throws IOException {
+
+        this.createTestMetadata();
+
+        table = newTestTable();
+        info = table.getInfo();
+
+        timeComp0 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-14"));
+        timeComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
+        timeComp2 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-13"));
+        timeComp3 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-15"));
+        timeComp4 = compare(info.colRef(0), FilterOperatorEnum.EQ, enc(info, 0, "2015-01-15"));
+        timeComp5 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-15"));
+        timeComp6 = compare(info.colRef(0), FilterOperatorEnum.EQ, enc(info, 0, "2015-01-14"));
+        ageComp1 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "10"));
+        ageComp2 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "20"));
+        ageComp3 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "30"));
+        ageComp4 = compare(info.colRef(1), FilterOperatorEnum.NEQ, enc(info, 1, "30"));
+
+    }
+
+    @Test
+    public void verifySegmentSkipping() {
+
+        ByteArray segmentStart = enc(info, 0, "2015-01-14");
+        ByteArray segmentStartX = enc(info, 0, "2015-01-14 00:00:00");//when the partition col is dict encoded, the time format can be arbitrary
+        ByteArray segmentEnd = enc(info, 0, "2015-01-15");
+        assertEquals(segmentStart, segmentStartX);
+
+        {
+            LogicalTupleFilter filter = and(timeComp0, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());//scan ranges are [closed, closed]
+            assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
+            assertEquals(1, r.get(0).fuzzyKeys.size());
+            assertEquals("[[null, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
+        }
+        {
+            LogicalTupleFilter filter = and(timeComp2, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());
+        }
+        {
+            LogicalTupleFilter filter = and(timeComp4, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());
+        }
+        {
+            LogicalTupleFilter filter = and(timeComp5, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());
+        }
+        {
+            LogicalTupleFilter filter = or(and(timeComp2, ageComp1), and(timeComp1, ageComp1), and(timeComp6, ageComp1));
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());
+            assertEquals("[1421193600000, 10]-[null, 10]", r.get(0).toString());
+            assertEquals("[[null, 10, null, null, null], [1421193600000, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
+        }
+        {
+            LogicalTupleFilter filter = or(timeComp2, timeComp1, timeComp6);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());
+            assertEquals("[1421193600000, null]-[null, null]", r.get(0).toString());
+            assertEquals(0, r.get(0).fuzzyKeys.size());
+        }
+        {
+            //skip FALSE filter
+            LogicalTupleFilter filter = and(ageComp1, ConstantTupleFilter.FALSE);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());
+        }
+        {
+            //TRUE or FALSE filter
+            LogicalTupleFilter filter = or(ConstantTupleFilter.TRUE, ConstantTupleFilter.FALSE);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());
+            assertEquals("[null, null]-[null, null]", r.get(0).toString());
+        }
+        {
+            //TRUE or other filter
+            LogicalTupleFilter filter = or(ageComp1, ConstantTupleFilter.TRUE);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());
+            assertEquals("[null, null]-[null, null]", r.get(0).toString());
+        }
+    }
+
+    @Test
+    public void verifySegmentSkipping2() {
+        ByteArray segmentEnd = enc(info, 0, "2015-01-15");
+
+        {
+            LogicalTupleFilter filter = and(timeComp0, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());//scan ranges are [closed, closed]
+            assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
+            assertEquals(1, r.get(0).fuzzyKeys.size());
+            assertEquals("[[null, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
+        }
+
+        {
+            LogicalTupleFilter filter = and(timeComp5, ageComp1);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());//scan ranges are [closed, closed]
+        }
+    }
+
+    @Test
+    public void verifyScanRangePlanner() {
+
+        // flatten or-and & hbase fuzzy value
+        {
+            LogicalTupleFilter filter = and(timeComp1, or(ageComp1, ageComp2));
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(1, r.size());
+            assertEquals("[1421193600000, 10]-[null, 20]", r.get(0).toString());
+            assertEquals("[[null, 10, null, null, null], [null, 20, null, null, null]]", r.get(0).fuzzyKeys.toString());
+        }
+
+        // pre-evaluate ever false
+        {
+            LogicalTupleFilter filter = and(timeComp1, timeComp2);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(0, r.size());
+        }
+
+        // pre-evaluate ever true
+        {
+            LogicalTupleFilter filter = or(timeComp1, ageComp4);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals("[[null, null]-[null, null]]", r.toString());
+        }
+
+        // merge overlap range
+        {
+            LogicalTupleFilter filter = or(timeComp1, timeComp3);
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals("[[null, null]-[null, null]]", r.toString());
+        }
+
+        // merge too many ranges
+        {
+            LogicalTupleFilter filter = or(and(timeComp4, ageComp1), and(timeComp4, ageComp2), and(timeComp4, ageComp3));
+            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
+            List<GTScanRange> r = planner.planScanRanges();
+            assertEquals(3, r.size());
+            assertEquals("[1421280000000, 10]-[1421280000000, 10]", r.get(0).toString());
+            assertEquals("[1421280000000, 20]-[1421280000000, 20]", r.get(1).toString());
+            assertEquals("[1421280000000, 30]-[1421280000000, 30]", r.get(2).toString());
+            planner.setMaxScanRanges(2);
+            List<GTScanRange> r2 = planner.planScanRanges();
+            assertEquals("[[1421280000000, 10]-[1421280000000, 30]]", r2.toString());
+        }
+    }
+
+    @Test
+    public void verifyFirstRow() throws IOException {
+        doScanAndVerify(table, new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest(), "[1421193600000, 30, Yang, 10, 10.5]", //
+                "[1421193600000, 30, Luke, 10, 10.5]", //
+                "[1421280000000, 20, Dong, 10, 10.5]", //
+                "[1421280000000, 20, Jason, 10, 10.5]", //
+                "[1421280000000, 30, Xu, 10, 10.5]", //
+                "[1421366400000, 20, Mahone, 10, 10.5]", //
+                "[1421366400000, 20, Qianhao, 10, 10.5]", //
+                "[1421366400000, 30, George, 10, 10.5]", //
+                "[1421366400000, 30, Shaofeng, 10, 10.5]", //
+                "[1421452800000, 10, Kejia, 10, 10.5]");
+    }
+
+    //for testing GTScanRequest serialization and deserialization
+    public static GTScanRequest useDeserializedGTScanRequest(GTScanRequest origin) {
+        ByteBuffer buffer = ByteBuffer.allocate(BytesSerializer.SERIALIZE_BUFFER_SIZE);
+        GTScanRequest.serializer.serialize(origin, buffer);
+        buffer.flip();
+        GTScanRequest sGTScanRequest = GTScanRequest.serializer.deserialize(buffer);
+
+        Assert.assertArrayEquals(origin.getAggrMetricsFuncs(), sGTScanRequest.getAggrMetricsFuncs());
+        Assert.assertEquals(origin.getAggCacheMemThreshold(), sGTScanRequest.getAggCacheMemThreshold(), 0.01);
+        return sGTScanRequest;
+    }
+
+    @Test
+    public void verifyScanWithUnevaluatableFilter() throws IOException {
+        GTInfo info = table.getInfo();
+
+        CompareTupleFilter fComp = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
+        ExtractTupleFilter fUnevaluatable = unevaluatable(info.colRef(1));
+        LogicalTupleFilter fNotPlusUnevaluatable = not(unevaluatable(info.colRef(1)));
+        LogicalTupleFilter filter = and(fComp, fUnevaluatable, fNotPlusUnevaluatable);
+
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" }).setFilterPushDown(filter).createGTScanRequest();
+
+        // note the unEvaluatable column 1 in filter is added to group by
+        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], [null], [null]], aggrGroupBy={0, 1}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
+
+        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 20, null]", "[1421280000000, 30, null, 10, null]", "[1421366400000, 20, null, 20, null]", "[1421366400000, 30, null, 20, null]", "[1421452800000, 10, null, 10, null]");
+    }
+
+    @Test
+    public void verifyScanWithEvaluatableFilter() throws IOException {
+        GTInfo info = table.getInfo();
+
+        CompareTupleFilter fComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
+        CompareTupleFilter fComp2 = compare(info.colRef(1), FilterOperatorEnum.GT, enc(info, 1, "10"));
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[] { "sum" }).setFilterPushDown(filter).createGTScanRequest();
+        // note the evaluatable column 1 in filter is added to returned columns but not in group by
+        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 GT [\\x00]], aggrGroupBy={0}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
+
+        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 30, null]", "[1421366400000, 20, null, 40, null]");
+    }
+
+    @Test
+    public void testFilterScannerPerf() throws IOException {
+        GridTable table = newTestPerfTable();
+        GTInfo info = table.getInfo();
+
+        CompareTupleFilter fComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
+        CompareTupleFilter fComp2 = compare(info.colRef(1), FilterOperatorEnum.GT, enc(info, 1, "10"));
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        FilterResultCache.ENABLED = false;
+        testFilterScannerPerfInner(table, info, filter);
+        FilterResultCache.ENABLED = true;
+        testFilterScannerPerfInner(table, info, filter);
+        FilterResultCache.ENABLED = false;
+        testFilterScannerPerfInner(table, info, filter);
+        FilterResultCache.ENABLED = true;
+        testFilterScannerPerfInner(table, info, filter);
+    }
+
+    @SuppressWarnings("unused")
+    private void testFilterScannerPerfInner(GridTable table, GTInfo info, LogicalTupleFilter filter) throws IOException {
+        long start = System.currentTimeMillis();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(filter).createGTScanRequest();
+        IGTScanner scanner = table.scan(req);
+        int i = 0;
+        for (GTRecord r : scanner) {
+            i++;
+        }
+        scanner.close();
+        long end = System.currentTimeMillis();
+        System.out.println((end - start) + "ms with filter cache enabled=" + FilterResultCache.ENABLED + ", " + i + " rows");
+    }
+
+    @Test
+    public void verifyConvertFilterConstants1() {
+        GTInfo info = table.getInfo();
+
+        TableDesc extTable = TableDesc.mockup("ext");
+        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
+        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
+
+        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
+        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.EQ, "10");
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        List<TblColRef> colMapping = Lists.newArrayList();
+        colMapping.add(extColA);
+        colMapping.add(extColB);
+
+        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
+        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 EQ [\\x00]]", newFilter.toString());
+    }
+
+    @Test
+    public void verifyConvertFilterConstants2() {
+        GTInfo info = table.getInfo();
+
+        TableDesc extTable = TableDesc.mockup("ext");
+        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
+        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
+
+        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
+        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.LT, "9");
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        List<TblColRef> colMapping = Lists.newArrayList();
+        colMapping.add(extColA);
+        colMapping.add(extColB);
+
+        // $1<"9" round up to $1<"10"
+        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
+        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 LT [\\x00]]", newFilter.toString());
+    }
+
+    @Test
+    public void verifyConvertFilterConstants3() {
+        GTInfo info = table.getInfo();
+
+        TableDesc extTable = TableDesc.mockup("ext");
+        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
+        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
+
+        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
+        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.LTE, "9");
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        List<TblColRef> colMapping = Lists.newArrayList();
+        colMapping.add(extColA);
+        colMapping.add(extColB);
+
+        // $1<="9" round down to FALSE
+        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
+        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], []]", newFilter.toString());
+    }
+
+    @Test
+    public void verifyConvertFilterConstants4() {
+        GTInfo info = table.getInfo();
+
+        TableDesc extTable = TableDesc.mockup("ext");
+        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
+        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
+
+        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
+        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.IN, "9", "10", "15");
+        LogicalTupleFilter filter = and(fComp1, fComp2);
+
+        List<TblColRef> colMapping = Lists.newArrayList();
+        colMapping.add(extColA);
+        colMapping.add(extColB);
+
+        // $1 in ("9", "10", "15") has only "10" left
+        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
+        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 IN [\\x00]]", newFilter.toString());
+    }
+
+    private void doScanAndVerify(GridTable table, GTScanRequest req, String... verifyRows) throws IOException {
+        System.out.println(req);
+        IGTScanner scanner = table.scan(req);
+        int i = 0;
+        for (GTRecord r : scanner) {
+            System.out.println(r);
+            if (verifyRows == null || i >= verifyRows.length) {
+                Assert.fail();
+            }
+            assertEquals(verifyRows[i], r.toString());
+            i++;
+        }
+        scanner.close();
+    }
+
+    public static ByteArray enc(GTInfo info, int col, String value) {
+        ByteBuffer buf = ByteBuffer.allocate(info.getMaxColumnLength());
+        info.getCodeSystem().encodeColumnValue(col, value, buf);
+        return ByteArray.copyOf(buf.array(), buf.arrayOffset(), buf.position());
+    }
+
+    public static ExtractTupleFilter unevaluatable(TblColRef col) {
+        ExtractTupleFilter r = new ExtractTupleFilter(FilterOperatorEnum.EXTRACT);
+        r.addChild(new ColumnTupleFilter(col));
+        return r;
+    }
+
+    public static CompareTupleFilter compare(TblColRef col, FilterOperatorEnum op, Object... value) {
+        CompareTupleFilter result = new CompareTupleFilter(op);
+        result.addChild(new ColumnTupleFilter(col));
+        result.addChild(new ConstantTupleFilter(Arrays.asList(value)));
+        return result;
+    }
+
+    public static LogicalTupleFilter and(TupleFilter... children) {
+        return logic(FilterOperatorEnum.AND, children);
+    }
+
+    public static LogicalTupleFilter or(TupleFilter... children) {
+        return logic(FilterOperatorEnum.OR, children);
+    }
+
+    public static LogicalTupleFilter not(TupleFilter child) {
+        return logic(FilterOperatorEnum.NOT, child);
+    }
+
+    public static LogicalTupleFilter logic(FilterOperatorEnum op, TupleFilter... children) {
+        LogicalTupleFilter result = new LogicalTupleFilter(op);
+        for (TupleFilter c : children) {
+            result.addChild(c);
+        }
+        return result;
+    }
+
+    public static GridTable newTestTable() throws IOException {
+        GTInfo info = newInfo();
+        GTSimpleMemStore store = new GTSimpleMemStore(info);
+        GridTable table = new GridTable(info, store);
+
+        GTRecord r = new GTRecord(table.getInfo());
+        GTBuilder builder = table.rebuild();
+
+        builder.write(r.setValues("2015-01-14", "30", "Yang", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-14", "30", "Luke", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-15", "20", "Dong", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-15", "20", "Jason", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-15", "30", "Xu", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-16", "20", "Mahone", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-16", "20", "Qianhao", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-16", "30", "George", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-16", "30", "Shaofeng", new LongMutable(10), new BigDecimal("10.5")));
+        builder.write(r.setValues("2015-01-17", "10", "Kejia", new LongMutable(10), new BigDecimal("10.5")));
+        builder.close();
+
+        return table;
+    }
+
+    static GridTable newTestPerfTable() throws IOException {
+        GTInfo info = newInfo();
+        GTSimpleMemStore store = new GTSimpleMemStore(info);
+        GridTable table = new GridTable(info, store);
+
+        GTRecord r = new GTRecord(table.getInfo());
+        GTBuilder builder = table.rebuild();
+
+        for (int i = 0; i < 100000; i++) {
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-14", "30", "Yang", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-14", "30", "Luke", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-15", "20", "Dong", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-15", "20", "Jason", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-15", "30", "Xu", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-16", "20", "Mahone", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-16", "20", "Qianhao", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-16", "30", "George", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-16", "30", "Shaofeng", new LongMutable(10), new BigDecimal("10.5")));
+
+            for (int j = 0; j < 10; j++)
+                builder.write(r.setValues("2015-01-17", "10", "Kejia", new LongMutable(10), new BigDecimal("10.5")));
+        }
+        builder.close();
+
+        return table;
+    }
+
+    static GTInfo newInfo() {
+        Builder builder = GTInfo.builder();
+        builder.setCodeSystem(newDictCodeSystem());
+        builder.setColumns( //
+                DataType.getType("timestamp"), //
+                DataType.getType("integer"), //
+                DataType.getType("varchar(10)"), //
+                DataType.getType("bigint"), //
+                DataType.getType("decimal") //
+        );
+        builder.setPrimaryKey(setOf(0, 1));
+        builder.setColumnPreferIndex(setOf(0));
+        builder.enableColumnBlock(new ImmutableBitSet[] { setOf(0, 1), setOf(2), setOf(3, 4) });
+        builder.enableRowBlock(4);
+        GTInfo info = builder.build();
+        return info;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static CubeCodeSystem newDictCodeSystem() {
+        DimensionEncoding[] dimEncs = new DimensionEncoding[3];
+        dimEncs[1] = new DictionaryDimEnc(newDictionaryOfInteger());
+        dimEncs[2] = new DictionaryDimEnc(newDictionaryOfString());
+        return new CubeCodeSystem(dimEncs);
+    }
+
+    @SuppressWarnings("rawtypes")
+    private static Dictionary newDictionaryOfString() {
+        TrieDictionaryBuilder<String> builder = new TrieDictionaryBuilder<>(new StringBytesConverter());
+        builder.addValue("Dong");
+        builder.addValue("George");
+        builder.addValue("Jason");
+        builder.addValue("Kejia");
+        builder.addValue("Luke");
+        builder.addValue("Mahone");
+        builder.addValue("Qianhao");
+        builder.addValue("Shaofeng");
+        builder.addValue("Xu");
+        builder.addValue("Yang");
+        return builder.build(0);
+    }
+
+    @SuppressWarnings("rawtypes")
+    private static Dictionary newDictionaryOfInteger() {
+        NumberDictionaryBuilder<String> builder = new NumberDictionaryBuilder<>(new StringBytesConverter());
+        builder.addValue("10");
+        builder.addValue("20");
+        builder.addValue("30");
+        builder.addValue("40");
+        builder.addValue("50");
+        builder.addValue("60");
+        builder.addValue("70");
+        builder.addValue("80");
+        builder.addValue("90");
+        builder.addValue("100");
+        return builder.build(0);
+    }
+
+    public static ImmutableBitSet setOf(int... values) {
+        BitSet set = new BitSet();
+        for (int i : values)
+            set.set(i);
+        return new ImmutableBitSet(set);
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 375b198..fc2fd52 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -22,13 +22,16 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.File;
 import java.sql.DriverManager;
+import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.query.enumerator.OLAPQuery;
@@ -37,18 +40,26 @@ import org.apache.kylin.query.routing.Candidate;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
 import org.apache.kylin.query.schema.OLAPSchemaFactory;
 import org.apache.kylin.storage.hbase.HBaseStorage;
+import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
 import com.google.common.collect.Maps;
 
 public class ITKylinQueryTest extends KylinTestBase {
 
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
     @BeforeClass
     public static void setUp() throws Exception {
         printInfo("setUp in ITKylinQueryTest");
@@ -108,10 +119,52 @@ public class ITKylinQueryTest extends KylinTestBase {
         return "";
     }
 
-    @Ignore("this is only for debug")
+    @Test
+    public void testTimeoutQuery() throws Exception {
+
+        thrown.expect(SQLException.class);
+
+        //the query is expected to fail with a GTScanSelfTerminatedException as the root cause
+        thrown.expectCause(new BaseMatcher<Throwable>() {
+            @Override
+            public boolean matches(Object item) {
+                if (item instanceof GTScanSelfTerminatedException) {
+                    return true;
+                }
+                return false;
+            }
+
+            @Override
+            public void describeTo(Description description) {
+            }
+        });
+
+        Map<String, String> toggles = Maps.newHashMap();
+        toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
+        BackdoorToggles.setToggles(toggles);
+
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.03");//set timeout to 9s
+
+        //these two cubes have RAW measures, which disturb limit push down
+        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+
+        execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout", null, true);
+
+        //these two cubes have RAW measures, which disturb limit push down
+        RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+        RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "1");//reset the timeout multiplier back to 1
+        BackdoorToggles.cleanToggles();
+    }
+
+    //do not @Ignore this test; clean up your "temp" query folder instead
     @Test
     public void testTempQuery() throws Exception {
+        PRINT_RESULT = true;
         execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/temp", null, true);
+        PRINT_RESULT = false;
     }
 
     @Ignore

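In testTimeoutQuery above, the expected failure is matched with a hand-rolled anonymous BaseMatcher. Purely as an illustrative sketch (not part of the commit), the same cause check can be expressed with the stock Hamcrest instanceOf matcher; class and method names below are hypothetical:

import java.sql.SQLException;

import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
import org.hamcrest.CoreMatchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class TimeoutQueryMatcherSketch {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void expectSelfTermination() throws Exception {
        thrown.expect(SQLException.class);
        // same intent as the anonymous BaseMatcher in testTimeoutQuery
        thrown.expectCause(CoreMatchers.instanceOf(GTScanSelfTerminatedException.class));
        // ... set the WITHDELAY toggle, shrink the timeout and run the query,
        //     exactly as testTimeoutQuery does ...
    }
}
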
http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/kylin-it/src/test/resources/query/sql_timeout/query01.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_timeout/query01.sql b/kylin-it/src/test/resources/query/sql_timeout/query01.sql
new file mode 100644
index 0000000..3b9a837
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_timeout/query01.sql
@@ -0,0 +1,19 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select * from test_kylin_fact limit 1200

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
index 75533cd..5f21351 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
@@ -26,4 +26,5 @@ public enum CoprocessorBehavior {
     SCAN_FILTER, //only scan+filter used,used for profiling filter speed.  Will not return any result
     SCAN_FILTER_AGGR, //aggregate the result.  Will return results
     SCAN_FILTER_AGGR_CHECKMEM, //default full operations. Will return results
+    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY, // on each scan operation, delay 10ms to simulate slow queries, for test use
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 07a3cc3..5b48351 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -43,8 +43,8 @@ import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.gridtable.GTInfo;
-import org.apache.kylin.gridtable.GTScanRange;
 import org.apache.kylin.gridtable.GTScanRequest;
+import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
 import org.apache.kylin.gridtable.IGTScanner;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
@@ -106,7 +106,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         final String toggle = BackdoorToggles.getCoprocessorBehavior() == null ? CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString() : BackdoorToggles.getCoprocessorBehavior();
 
-        logger.debug("New scanner for current segment {} will use {} as endpoint's behavior", cubeSeg, toggle);
+        logger.info("New scanner for current segment {} will use {} as endpoint's behavior", cubeSeg, toggle);
 
         Pair<Short, Short> shardNumAndBaseShard = getShardNumAndBaseShard();
         short shardNum = shardNumAndBaseShard.getFirst();
@@ -146,7 +146,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                 rawScanBufferSize *= 4;
             }
         }
-        scanRequest.setGTScanRanges(Lists.<GTScanRange> newArrayList());//since raw scans are sent to coprocessor, we don't need to duplicate sending it
+        scanRequest.clearScanRanges();//since raw scans are sent to coprocessor, we don't need to duplicate sending it
 
         int scanRequestBufferSize = BytesSerializer.SERIALIZE_BUFFER_SIZE;
         while (true) {
@@ -248,7 +248,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     }
 
                     if (abnormalFinish[0]) {
-                        Throwable ex = new RuntimeException(logHeader + "The coprocessor thread stopped itself due to scan timeout or scan threshold(check region server log), failing current query...");
+                        Throwable ex = new GTScanSelfTerminatedException(logHeader + "The coprocessor thread stopped itself due to scan timeout or scan threshold(check region server log), failing current query...");
                         logger.error(logHeader + "Error when visiting cubes by endpoint", ex); // double log coz the query thread may already timeout
                         epResultItr.notifyCoprocException(ex);
                         return;

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index a359d19..f1e5dab 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -213,7 +213,7 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
             }
         };
 
-        IGTStore store = new HBaseReadonlyStore(cellListIterator, scanRequest, rawScans.get(0).hbaseColumns, hbaseColumnsToGT, cubeSeg.getRowKeyPreambleSize());
+        IGTStore store = new HBaseReadonlyStore(cellListIterator, scanRequest, rawScans.get(0).hbaseColumns, hbaseColumnsToGT, cubeSeg.getRowKeyPreambleSize(), false);
         IGTScanner rawScanner = store.scan(scanRequest);
 
         final IGTScanner decorateScanner = scanRequest.decorateScanner(rawScanner);

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index 7d48c1a..442963f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -18,20 +18,22 @@
 
 package org.apache.kylin.storage.hbase.cube.v2;
 
+import java.util.Iterator;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
+import org.apache.kylin.gridtable.GTScanRequest;
+import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Iterator;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.TimeUnit;
-
 class ExpectedSizeIterator implements Iterator<byte[]> {
     private static final Logger logger = LoggerFactory.getLogger(ExpectedSizeIterator.class);
 
@@ -48,22 +50,24 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
         this.expectedSize = expectedSize;
         this.queue = new ArrayBlockingQueue<byte[]>(expectedSize);
 
+        StringBuilder sb = new StringBuilder();
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+
         this.rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
         this.timeout = this.rpcTimeout * hconf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
-        logger.info("rpc timeout is {} and after multiply retry times become {}", this.rpcTimeout, this.timeout);
-        this.timeout = Math.max(this.timeout, 5 * 60000);
+        sb.append("rpc timeout is " + this.rpcTimeout + " and after multiply retry times becomes " + this.timeout);
+
         this.timeout *= KylinConfig.getInstanceFromEnv().getCubeVisitTimeoutTimes();
+        sb.append(" after multiply kylin.query.cube.visit.timeout.times becomes " + this.timeout);
+
+        logger.info(sb.toString());
 
         if (BackdoorToggles.getQueryTimeout() != -1) {
             this.timeout = BackdoorToggles.getQueryTimeout();
+            logger.info("rpc timeout is overwritten to " + this.timeout);
         }
 
-        this.timeout *= 1.1; // allow for some delay
-
-        logger.info("Final Timeout for ExpectedSizeIterator is: " + this.timeout);
-
-        this.timeoutTS = System.currentTimeMillis() + this.timeout;
+        this.timeoutTS = System.currentTimeMillis() + 2 * this.timeout;//give the query side twice the coprocessor timeout so the query thread does not time out first
     }
 
     @Override
@@ -85,9 +89,14 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
             }
 
             if (coprocException != null) {
-                throw new RuntimeException("Error in coprocessor", coprocException);
+                if (coprocException instanceof GTScanSelfTerminatedException)
+                    throw (GTScanSelfTerminatedException) coprocException;
+                else
+                    throw new RuntimeException("Error in coprocessor",coprocException);
+                
             } else if (ret == null) {
-                throw new RuntimeException("Timeout visiting cube!");
+                throw new RuntimeException("Timeout visiting cube! Check why coprocessor exception is not sent back? In coprocessor Self-termination is checked every " + //
+                        GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout(" + timeout + ") cannot support this many scans?");
             } else {
                 return ret;
             }
@@ -110,7 +119,7 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
     }
 
     public long getRpcTimeout() {
-        return this.rpcTimeout;
+        return this.timeout;
     }
 
     public void notifyCoprocException(Throwable ex) {

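The rewritten constructor above derives the wait budget purely from three factors: the HBase rpc timeout, the client retry count, and kylin.query.cube.visit.timeout.times, then lets the query side wait twice that long so the coprocessor reliably times out first. A standalone sketch of the arithmetic with hypothetical values (the 60s rpc timeout and 5 retries are assumptions; 0.03 is the multiplier used by testTimeoutQuery earlier in this mail):

public class CubeVisitTimeoutMath {
    public static void main(String[] args) {
        long rpcTimeout = 60_000L;        // hypothetical hbase.rpc.timeout in ms
        int retries = 5;                  // hypothetical hbase.client.retries.number
        double visitTimeoutTimes = 0.03;  // kylin.query.cube.visit.timeout.times as set by testTimeoutQuery

        long timeout = (long) (rpcTimeout * retries * visitTimeoutTimes);
        long queryGiveUpTS = System.currentTimeMillis() + 2 * timeout; // client waits twice the coprocessor budget

        System.out.println("coprocessor timeout budget: " + timeout + " ms"); // 9000 ms with these numbers
        System.out.println("client-side give-up timestamp: " + queryGiveUpTS);
    }
}
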
http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
index 4b9b4fa..1d8ad79 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
@@ -43,13 +43,15 @@ public class HBaseReadonlyStore implements IGTStore {
     private List<Pair<byte[], byte[]>> hbaseColumns;
     private List<List<Integer>> hbaseColumnsToGT;
     private int rowkeyPreambleSize;
+    private boolean withDelay = false;
 
-    public HBaseReadonlyStore(CellListIterator cellListIterator, GTScanRequest gtScanRequest, List<Pair<byte[], byte[]>> hbaseColumns, List<List<Integer>> hbaseColumnsToGT, int rowkeyPreambleSize) {
+    public HBaseReadonlyStore(CellListIterator cellListIterator, GTScanRequest gtScanRequest, List<Pair<byte[], byte[]>> hbaseColumns, List<List<Integer>> hbaseColumnsToGT, int rowkeyPreambleSize, boolean withDelay) {
         this.cellListIterator = cellListIterator;
         this.info = gtScanRequest.getInfo();
         this.hbaseColumns = hbaseColumns;
         this.hbaseColumnsToGT = hbaseColumnsToGT;
         this.rowkeyPreambleSize = rowkeyPreambleSize;
+        this.withDelay = withDelay;
     }
 
     @Override
@@ -95,6 +97,13 @@ public class HBaseReadonlyStore implements IGTStore {
 
                     @Override
                     public boolean hasNext() {
+                        if (withDelay) {
+                            try {
+                                Thread.sleep(10);
+                            } catch (InterruptedException e) {
+                                e.printStackTrace();
+                            }
+                        }
                         return cellListIterator.hasNext();
                     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index b29d0d1..064d100 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -170,7 +170,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
     @SuppressWarnings("checkstyle:methodlength")
     @Override
-    public void visitCube(final RpcController controller, CubeVisitProtos.CubeVisitRequest request, RpcCallback<CubeVisitProtos.CubeVisitResponse> done) {
+    public void visitCube(final RpcController controller, final CubeVisitProtos.CubeVisitRequest request, RpcCallback<CubeVisitProtos.CubeVisitResponse> done) {
         List<RegionScanner> regionScanners = Lists.newArrayList();
         HRegion region = null;
 
@@ -241,7 +241,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             }
 
             if (behavior.ordinal() < CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
-                scanReq.setAggCacheMemThreshold(0); // disable mem check if so told
+                scanReq.disableAggCacheMemCheck(); // disable mem check if so told
             }
 
             final MutableBoolean scanNormalComplete = new MutableBoolean(true);
@@ -266,7 +266,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                         throw new GTScanExceedThresholdException("Exceed scan threshold at " + counter);
                     }
 
-                    if (counter % 100000 == 1) {
+                    if (counter % (10 * GTScanRequest.terminateCheckInterval) == 1) {
                         logger.info("Scanned " + counter + " rows from HBase.");
                     }
                     counter++;
@@ -284,7 +284,8 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                 }
             };
 
-            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, request.getRowkeyPreambleSize());
+            IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, //
+                    request.getRowkeyPreambleSize(), CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString().equals(request.getBehavior()));
 
             IGTScanner rawScanner = store.scan(scanReq);
             IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, //
@@ -299,14 +300,9 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             try {
                 for (GTRecord oneRecord : finalScanner) {
 
-                    if (finalRowCount > storagePushDownLimit) {
-                        logger.info("The finalScanner aborted because storagePushDownLimit is satisfied");
-                        break;
-                    }
-
-                    if (finalRowCount % 100000 == 1) {
+                    if (finalRowCount % GTScanRequest.terminateCheckInterval == 1) {
                         if (System.currentTimeMillis() > deadline) {
-                            throw new GTScanTimeoutException("finalScanner timeouts after scanned " + finalRowCount);
+                            throw new GTScanTimeoutException("finalScanner timed out after contributing " + finalRowCount + " rows");
                         }
                     }
 
@@ -319,7 +315,15 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                     }
 
                     outputStream.write(buffer.array(), 0, buffer.position());
+
                     finalRowCount++;
+
+                    //if storage-side aggregation is running, rely on GTAggregateScanner's own limit check instead
+                    if (!scanReq.isDoingStorageAggregation() && finalRowCount >= storagePushDownLimit) {
+                        //read one more record than limit
+                        logger.info("The finalScanner aborted because storagePushDownLimit is satisfied");
+                        break;
+                    }
                 }
             } catch (GTScanTimeoutException e) {
                 scanNormalComplete.setValue(false);
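
Taken together, the changes above make the coprocessor check the wall clock only once every GTScanRequest.terminateCheckInterval rows and defer the push-down limit to GTAggregateScanner whenever storage-side aggregation is running. A condensed, hypothetical sketch of that control flow follows; ScanLoopSketch and its parameters are stand-ins for illustration, not the actual Kylin classes.

import java.util.Iterator;

// Hypothetical sketch of the termination logic in the scan loop above: the clock
// is consulted only every checkInterval rows, and the push-down limit applies
// only when storage-side aggregation is not running.
final class ScanLoopSketch {

    static long drain(Iterator<byte[]> rows, long checkInterval, long deadline,
                      boolean doingStorageAggregation, long pushDownLimit) {
        long rowCount = 0;
        while (rows.hasNext()) {
            if (rowCount % checkInterval == 1 && System.currentTimeMillis() > deadline) {
                throw new RuntimeException("scan timed out after " + rowCount + " rows");
            }

            rows.next();      // a real implementation would serialize the row here
            rowCount++;

            // GTAggregateScanner enforces its own limit when aggregation is on;
            // otherwise stop as soon as the push-down limit is reached
            if (!doingStorageAggregation && rowCount >= pushDownLimit) {
                break;
            }
        }
        return rowCount;
    }
}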


[48/50] [abbrv] kylin git commit: KYLIN-1726 allow job discard itself

Posted by sh...@apache.org.
KYLIN-1726 allow job discard itself

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/aff2df59
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/aff2df59
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/aff2df59

Branch: refs/heads/KYLIN-1726
Commit: aff2df5987e98ee9fd64d4803a8a2dea90013e40
Parents: aa30880
Author: shaofengshi <sh...@apache.org>
Authored: Tue Sep 13 10:28:03 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java | 14 ++-----
 .../kylin/job/execution/AbstractExecutable.java |  2 +
 .../job/execution/DefaultChainedExecutable.java |  2 +
 .../kylin/job/execution/ExecuteResult.java      |  4 ++
 .../kylin/job/DiscardedTestExecutable.java      | 41 ++++++++++++++++++++
 .../impl/threadpool/DefaultSchedulerTest.java   | 16 ++++++++
 6 files changed, 68 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 11eabce..d494fcc 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -322,34 +322,26 @@ public class CubeManager implements IRealizationProvider {
             Iterator<CubeSegment> iterator = newSegs.iterator();
             while (iterator.hasNext()) {
                 CubeSegment currentSeg = iterator.next();
-                boolean found = false;
                 for (CubeSegment toRemoveSeg : update.getToRemoveSegs()) {
                     if (currentSeg.getUuid().equals(toRemoveSeg.getUuid())) {
+                        logger.info("Remove segment " + currentSeg.toString());
+                        toRemoveResources.add(currentSeg.getStatisticsResourcePath());
                         iterator.remove();
-                        toRemoveResources.add(toRemoveSeg.getStatisticsResourcePath());
-                        found = true;
+                        break;
                     }
                 }
-                if (found == false) {
-                    logger.error("Segment '" + currentSeg.getName() + "' doesn't exist for remove.");
-                }
             }
 
         }
 
         if (update.getToUpdateSegs() != null) {
             for (CubeSegment segment : update.getToUpdateSegs()) {
-                boolean found = false;
                 for (int i = 0; i < newSegs.size(); i++) {
                     if (newSegs.get(i).getUuid().equals(segment.getUuid())) {
                         newSegs.set(i, segment);
-                        found = true;
                         break;
                     }
                 }
-                if (found == false) {
-                    logger.error("Segment '" + segment.getName() + "' doesn't exist for update.");
-                }
             }
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index 90e4d3c..b4ca469 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -74,6 +74,8 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         if (!isDiscarded()) {
             if (result.succeed()) {
                 executableManager.updateJobOutput(getId(), ExecutableState.SUCCEED, null, result.output());
+            } else if (result.discarded()) {
+                executableManager.updateJobOutput(getId(), ExecutableState.DISCARDED, null, result.output());
             } else {
                 executableManager.updateJobOutput(getId(), ExecutableState.ERROR, null, result.output());
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 39a5f4f..5a57b05 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -119,6 +119,8 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
             } else {
                 jobService.updateJobOutput(getId(), ExecutableState.READY, null, null);
             }
+        } else if (result.discarded()) {
+            jobService.updateJobOutput(getId(), ExecutableState.DISCARDED, null, result.output());
         } else {
             setEndTime(System.currentTimeMillis());
             jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, result.output());

http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
index 760a574..2347e7d 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
@@ -49,6 +49,10 @@ public final class ExecuteResult {
         return state == State.SUCCEED;
     }
 
+    public boolean discarded() {
+        return state == State.DISCARDED;
+    }
+
     public String output() {
         return output;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java b/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java
new file mode 100644
index 0000000..9362e18
--- /dev/null
+++ b/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.job;
+
+import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.execution.ExecutableContext;
+import org.apache.kylin.job.execution.ExecuteResult;
+
+/**
+ */
+public class DiscardedTestExecutable extends BaseTestExecutable {
+
+    public DiscardedTestExecutable() {
+        super();
+    }
+
+    @Override
+    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+        try {
+            Thread.sleep(1000);
+        } catch (InterruptedException e) {
+        }
+        return new ExecuteResult(ExecuteResult.State.DISCARDED, "discarded");
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/aff2df59/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
index df521f9..2baf10a 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
@@ -29,6 +29,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.kylin.job.DiscardedTestExecutable;
 import org.apache.kylin.job.BaseTestExecutable;
 import org.apache.kylin.job.ErrorTestExecutable;
 import org.apache.kylin.job.FailedTestExecutable;
@@ -83,6 +84,21 @@ public class DefaultSchedulerTest extends BaseSchedulerTest {
     }
 
     @Test
+    public void testSucceedAndDiscarded() throws Exception {
+        DefaultChainedExecutable job = new DefaultChainedExecutable();
+        BaseTestExecutable task1 = new SucceedTestExecutable();
+        BaseTestExecutable task2 = new DiscardedTestExecutable();
+        job.addTask(task1);
+        job.addTask(task2);
+        jobService.addJob(job);
+        waitForJobFinish(job.getId());
+        Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState());
+        Assert.assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
+        Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task2.getId()).getState());
+    }
+
+
+    @Test
     public void testSucceedAndError() throws Exception {
         DefaultChainedExecutable job = new DefaultChainedExecutable();
         BaseTestExecutable task1 = new ErrorTestExecutable();
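
With the DISCARDED state wired through ExecuteResult, AbstractExecutable and DefaultChainedExecutable, a step can now end an entire chained job without flagging it as an error. The hypothetical task below shows how such a step might use the new result state; the segmentStillExists() precondition is invented for illustration, and it assumes, as DiscardedTestExecutable above suggests, that doWork is the only method a subclass must provide and that the same two-argument ExecuteResult constructor accepts SUCCEED.

import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableContext;
import org.apache.kylin.job.execution.ExecuteResult;

// Hypothetical: a step that asks the scheduler to discard the job instead of failing it.
public class SelfDiscardingStep extends AbstractExecutable {

    @Override
    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
        if (!segmentStillExists()) {
            // the job ends up in ExecutableState.DISCARDED rather than ERROR
            return new ExecuteResult(ExecuteResult.State.DISCARDED, "segment gone, discarding job");
        }
        // ... the real work of the step would go here ...
        return new ExecuteResult(ExecuteResult.State.SUCCEED, "done");
    }

    // invented precondition, standing in for whatever makes the step obsolete
    private boolean segmentStillExists() {
        return true;
    }
}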


[21/50] [abbrv] kylin git commit: KYLIN-1983 only keep apache-license in main pom.xml

Posted by sh...@apache.org.
KYLIN-1983 only keep apache-license in main pom.xml

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c59d63de
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c59d63de
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c59d63de

Branch: refs/heads/KYLIN-1726
Commit: c59d63de6fc404456bd59f4ca5d36e48fb251637
Parents: c5c8501
Author: shaofengshi <sh...@apache.org>
Authored: Sun Sep 11 10:00:54 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sun Sep 11 10:13:28 2016 +0800

----------------------------------------------------------------------
 kylin-it/pom.xml      | 63 ---------------------------------------
 pom.xml               | 11 +++++++
 query/pom.xml         | 68 ------------------------------------------
 storage-hbase/pom.xml | 73 ----------------------------------------------
 4 files changed, 11 insertions(+), 204 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c59d63de/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index f2e03b6..e14fa7d 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -332,69 +332,6 @@
                 </plugins>
             </build>
         </profile>
-        <profile>
-            <!-- This profile adds/overrides few features of the 'apache-release'
-                 profile in the parent pom. -->
-            <id>apache-release</id>
-            <build>
-                <plugins>
-                    <!-- Apache-RAT checks for files without headers.
-                         If run on a messy developer's sandbox, it will fail.
-                         This serves as a reminder to only build a release in a clean
-                         sandbox! -->
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <configuration>
-                            <numUnapprovedLicenses>0</numUnapprovedLicenses>
-                            <excludes>
-                                <!-- test data -->
-                                <exclude>src/test/**/*.dat</exclude>
-                                <exclude>src/test/**/*.expected</exclude>
-                            </excludes>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <phase>verify</phase>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <dependencies>
-                            <dependency>
-                                <groupId>org.apache.maven.doxia</groupId>
-                                <artifactId>doxia-core</artifactId>
-                                <exclusions>
-                                    <exclusion>
-                                        <groupId>xerces</groupId>
-                                        <artifactId>xercesImpl</artifactId>
-                                    </exclusion>
-                                </exclusions>
-                            </dependency>
-                        </dependencies>
-                    </plugin>
-                    <plugin>
-                        <groupId>net.ju-n.maven.plugins</groupId>
-                        <artifactId>checksum-maven-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>artifacts</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <algorithms>
-                                <algorithm>MD5</algorithm>
-                                <algorithm>SHA-1</algorithm>
-                            </algorithms>
-                            <failOnError>false</failOnError>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
     </profiles>
 
 </project>

http://git-wip-us.apache.org/repos/asf/kylin/blob/c59d63de/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d5925b9..38f9365 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1215,6 +1215,17 @@
                                 <exclude>**/*.DEF</exclude>
                                 <exclude>**/*.isl</exclude>
                                 <exclude>**/*.isproj</exclude>
+
+                                <!-- protobuf generated -->
+                                <exclude>
+                                    src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/generated/IIProtos.java
+                                </exclude>
+                                <exclude>
+                                    src/main/java/org/apache/kylin/storage/hbase/cube/v1/filter/generated/FilterProtosExt.java
+                                </exclude>
+                                <exclude>
+                                    src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
+                                </exclude>
                             </excludes>
                         </configuration>
                         <executions>

http://git-wip-us.apache.org/repos/asf/kylin/blob/c59d63de/query/pom.xml
----------------------------------------------------------------------
diff --git a/query/pom.xml b/query/pom.xml
index 0937b5a..8bb72ae 100644
--- a/query/pom.xml
+++ b/query/pom.xml
@@ -158,72 +158,4 @@
             </exclusions>
         </dependency>
     </dependencies>
-
-
-    <profiles>
-        <profile>
-            <!-- This profile adds/overrides few features of the 'apache-release'
-                 profile in the parent pom. -->
-            <id>apache-release</id>
-            <build>
-                <plugins>
-                    <!-- Apache-RAT checks for files without headers.
-                         If run on a messy developer's sandbox, it will fail.
-                         This serves as a reminder to only build a release in a clean
-                         sandbox! -->
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <configuration>
-                            <numUnapprovedLicenses>0</numUnapprovedLicenses>
-                            <excludes>
-                                <!-- test data -->
-                                <exclude>src/test/**/*.dat</exclude>
-                                <exclude>src/test/**/*.expected</exclude>
-                            </excludes>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <phase>verify</phase>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <dependencies>
-                            <dependency>
-                                <groupId>org.apache.maven.doxia</groupId>
-                                <artifactId>doxia-core</artifactId>
-                                <exclusions>
-                                    <exclusion>
-                                        <groupId>xerces</groupId>
-                                        <artifactId>xercesImpl</artifactId>
-                                    </exclusion>
-                                </exclusions>
-                            </dependency>
-                        </dependencies>
-                    </plugin>
-                    <plugin>
-                        <groupId>net.ju-n.maven.plugins</groupId>
-                        <artifactId>checksum-maven-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>artifacts</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <algorithms>
-                                <algorithm>MD5</algorithm>
-                                <algorithm>SHA-1</algorithm>
-                            </algorithms>
-                            <failOnError>false</failOnError>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/kylin/blob/c59d63de/storage-hbase/pom.xml
----------------------------------------------------------------------
diff --git a/storage-hbase/pom.xml b/storage-hbase/pom.xml
index b21a931..b332354 100644
--- a/storage-hbase/pom.xml
+++ b/storage-hbase/pom.xml
@@ -150,77 +150,4 @@
         </plugins>
     </build>
 
-    <profiles>
-        <profile>
-            <!-- This profile adds/overrides few features of the 'apache-release'
-                 profile in the parent pom. -->
-            <id>apache-release</id>
-            <build>
-                <plugins>
-                    <!-- Apache-RAT checks for files without headers.
-                         If run on a messy developer's sandbox, it will fail.
-                         This serves as a reminder to only build a release in a clean
-                         sandbox! -->
-                    <plugin>
-                        <groupId>org.apache.rat</groupId>
-                        <artifactId>apache-rat-plugin</artifactId>
-                        <configuration>
-                            <numUnapprovedLicenses>0</numUnapprovedLicenses>
-                            <excludes>
-                                <!-- protobuf generated -->
-                                <exclude>
-                                    src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/generated/IIProtos.java
-                                </exclude>
-                                <exclude>
-                                    src/main/java/org/apache/kylin/storage/hbase/cube/v1/filter/generated/FilterProtosExt.java
-                                </exclude>
-                                <exclude>
-                                    src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/generated/CubeVisitProtos.java
-                                </exclude>
-                                <exclude>**/src/test/resources/**</exclude>
-                            </excludes>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <phase>verify</phase>
-                                <goals>
-                                    <goal>check</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <dependencies>
-                            <dependency>
-                                <groupId>org.apache.maven.doxia</groupId>
-                                <artifactId>doxia-core</artifactId>
-                                <exclusions>
-                                    <exclusion>
-                                        <groupId>xerces</groupId>
-                                        <artifactId>xercesImpl</artifactId>
-                                    </exclusion>
-                                </exclusions>
-                            </dependency>
-                        </dependencies>
-                    </plugin>
-                    <plugin>
-                        <groupId>net.ju-n.maven.plugins</groupId>
-                        <artifactId>checksum-maven-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>artifacts</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <algorithms>
-                                <algorithm>MD5</algorithm>
-                                <algorithm>SHA-1</algorithm>
-                            </algorithms>
-                            <failOnError>false</failOnError>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
 </project>


[16/50] [abbrv] kylin git commit: minor, fix job start/end in diagnosis

Posted by sh...@apache.org.
minor, fix job start/end in diagnosis


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d7cbf673
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d7cbf673
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d7cbf673

Branch: refs/heads/KYLIN-1726
Commit: d7cbf6732a9571007de61dd492ed4d7559cbf9ac
Parents: d7a3fdf
Author: lidongsjtu <li...@apache.org>
Authored: Sat Sep 10 15:23:43 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Sat Sep 10 16:09:19 2016 +0800

----------------------------------------------------------------------
 tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java | 2 ++
 1 file changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d7cbf673/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
index 086a84f..ef77c6a 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
@@ -133,6 +133,8 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
         result.setType(CubeBuildTypeEnum.BUILD);
         result.setStatus(parseToJobStatus(output.getState()));
         result.setMrWaiting(AbstractExecutable.getExtraInfoAsLong(output, CubingJob.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
+        result.setExecStartTime(AbstractExecutable.getStartTime(output));
+        result.setExecEndTime(AbstractExecutable.getEndTime(output));
         result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output), AbstractExecutable.getEndTime(output)) / 1000);
         for (int i = 0; i < cubeJob.getTasks().size(); ++i) {
             AbstractExecutable task = cubeJob.getTasks().get(i);
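
The two new setters carry the raw epoch-millisecond start/end timestamps into the diagnosis output alongside the derived duration in seconds. Below is a small hypothetical sketch of how such fields might be rendered for display; the formatting is an assumption for illustration, not code from JobInstanceExtractor.

import java.text.SimpleDateFormat;
import java.util.Date;

// Hypothetical rendering of the execStartTime/execEndTime fields added above.
final class JobTimeFormatSketch {

    static String describe(long execStartTime, long execEndTime) {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        long durationSeconds = (execEndTime - execStartTime) / 1000;  // duration in seconds (the extractor also divides by 1000)
        return fmt.format(new Date(execStartTime)) + " ~ " + fmt.format(new Date(execEndTime))
                + " (" + durationSeconds + "s)";
    }
}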


[25/50] [abbrv] kylin git commit: KYLIN-1827 code format

Posted by sh...@apache.org.
KYLIN-1827 code format

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/aef7869c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/aef7869c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/aef7869c

Branch: refs/heads/KYLIN-1726
Commit: aef7869c953dec1caaf4b0e426b77c0a9a8938b1
Parents: 0954176
Author: shaofengshi <sh...@apache.org>
Authored: Mon Sep 12 11:36:52 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Sep 12 11:36:52 2016 +0800

----------------------------------------------------------------------
 .../kylin/job/execution/AbstractExecutable.java | 20 ++++++++++----------
 .../org/apache/kylin/engine/mr/CubingJob.java   |  2 +-
 2 files changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/aef7869c/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index 1eee5da..90e4d3c 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -125,10 +125,10 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
 
             onExecuteFinished(result, executableContext);
         } catch (Exception e) {
-            if (isMetaDataPersistException(e)){
+            if (isMetaDataPersistException(e)) {
                 handleMetaDataPersistException(e);
             }
-            if (e instanceof ExecuteException){
+            if (e instanceof ExecuteException) {
                 throw e;
             } else {
                 throw new ExecuteException(e);
@@ -142,15 +142,15 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     }
 
     private boolean isMetaDataPersistException(Exception e) {
-        if (e instanceof PersistentException){
+        if (e instanceof PersistentException) {
             return true;
         }
 
         Throwable t = e.getCause();
         int depth = 0;
-        while (t!= null && depth<5) {
-            depth ++;
-            if (t instanceof PersistentException){
+        while (t != null && depth < 5) {
+            depth++;
+            if (t instanceof PersistentException) {
                 return true;
             }
             t = t.getCause();
@@ -242,19 +242,19 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     protected final void notifyUserStatusChange(ExecutableContext context, ExecutableState state) {
         try {
             final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            List<String> users =  getAllNofifyUsers(kylinConfig);
+            List<String> users = getAllNofifyUsers(kylinConfig);
             if (users.isEmpty()) {
                 logger.warn("no need to send email, user list is empty");
                 return;
             }
             final Pair<String, String> email = formatNotifications(context, state);
-            doSendMail(kylinConfig,users,email);
+            doSendMail(kylinConfig, users, email);
         } catch (Exception e) {
             logger.error("error send email", e);
         }
     }
 
-    private List<String> getAllNofifyUsers(KylinConfig kylinConfig){
+    private List<String> getAllNofifyUsers(KylinConfig kylinConfig) {
         List<String> users = Lists.newArrayList();
         users.addAll(getNotifyList());
         final String[] adminDls = kylinConfig.getAdminDls();
@@ -266,7 +266,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         return users;
     }
 
-    private void doSendMail(KylinConfig kylinConfig, List<String> users, Pair<String,String> email){
+    private void doSendMail(KylinConfig kylinConfig, List<String> users, Pair<String, String> email) {
         if (email == null) {
             logger.warn("no need to send email, content is null");
             return;

http://git-wip-us.apache.org/repos/asf/kylin/blob/aef7869c/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
index 9c7f57a..1a0113d 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/CubingJob.java
@@ -220,7 +220,7 @@ public class CubingJob extends DefaultChainedExecutable {
         } catch (UnknownHostException e) {
             logger.warn(e.getLocalizedMessage(), e);
         }
-        sendMail(Pair.of(title,content));
+        sendMail(Pair.of(title, content));
     }
 
     public long getMapReduceWaitTime() {


[30/50] [abbrv] kylin git commit: KYLIN-1922 fix CI

Posted by sh...@apache.org.
KYLIN-1922 fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3c4537dc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3c4537dc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3c4537dc

Branch: refs/heads/KYLIN-1726
Commit: 3c4537dc8f15ccd8b0f1102ed67b3f5a21ec29d6
Parents: 6ed643b
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Sep 13 11:29:17 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Sep 13 11:29:17 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITKylinQueryTest.java    | 11 ++++++----
 .../src/test/resources/query/sql/query45.sql    |  4 ++--
 .../test/resources/query/sql_limit/query02.sql  |  4 ++--
 .../query/sql_topn/query45.sql.disable          | 23 ++++++++++++++++++++
 .../routing/rules/RealizationSortRule.java      | 10 +++++++++
 5 files changed, 44 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3c4537dc/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index b9895e8..2417d68 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -29,7 +29,6 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
-import org.apache.kylin.gridtable.GTScanTimeoutException;
 import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.query.routing.Candidate;
@@ -141,9 +140,13 @@ public class ITKylinQueryTest extends KylinTestBase {
     //don't try to ignore this test, try to clean your "temp" folder
     @Test
     public void testTempQuery() throws Exception {
-        PRINT_RESULT = true;
-        execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/temp", null, true);
-        PRINT_RESULT = false;
+        try {
+            PRINT_RESULT = true;
+            execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/temp", null, true);
+        } finally {
+            PRINT_RESULT = false;
+        }
+
     }
 
     @Ignore

http://git-wip-us.apache.org/repos/asf/kylin/blob/3c4537dc/kylin-it/src/test/resources/query/sql/query45.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query45.sql b/kylin-it/src/test/resources/query/sql/query45.sql
index 7b55443..d3cfb06 100644
--- a/kylin-it/src/test/resources/query/sql/query45.sql
+++ b/kylin-it/src/test/resources/query/sql/query45.sql
@@ -18,6 +18,6 @@
 
 
 
-select seller_id, sum(price) as s from test_kylin_fact
+select seller_id,lstg_format_name, sum(price) as s from test_kylin_fact
   where lstg_format_name='FP-GTC' 
-  group by seller_id limit 20
+  group by seller_id,lstg_format_name

http://git-wip-us.apache.org/repos/asf/kylin/blob/3c4537dc/kylin-it/src/test/resources/query/sql_limit/query02.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_limit/query02.sql b/kylin-it/src/test/resources/query/sql_limit/query02.sql
index 53f7bd7..4346dd9 100644
--- a/kylin-it/src/test/resources/query/sql_limit/query02.sql
+++ b/kylin-it/src/test/resources/query/sql_limit/query02.sql
@@ -18,7 +18,7 @@
 
 
 
-select seller_id, sum(price) from test_kylin_fact
+select seller_id,lstg_format_name,sum(price) from test_kylin_fact
   where lstg_format_name='FP-GTC' 
-  group by seller_id limit 20
+  group by seller_id,lstg_format_name limit 20
  

http://git-wip-us.apache.org/repos/asf/kylin/blob/3c4537dc/kylin-it/src/test/resources/query/sql_topn/query45.sql.disable
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_topn/query45.sql.disable b/kylin-it/src/test/resources/query/sql_topn/query45.sql.disable
new file mode 100644
index 0000000..39f9571
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_topn/query45.sql.disable
@@ -0,0 +1,23 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+
+
+select seller_id, sum(price) as s from test_kylin_fact
+  where lstg_format_name='FP-GTC' 
+  group by seller_id

http://git-wip-us.apache.org/repos/asf/kylin/blob/3c4537dc/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java b/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
index d3c67d7..bd83af1 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/rules/RealizationSortRule.java
@@ -23,12 +23,22 @@ import java.util.List;
 
 import org.apache.kylin.query.routing.Candidate;
 import org.apache.kylin.query.routing.RoutingRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  */
 public class RealizationSortRule extends RoutingRule {
+    private static final Logger logger = LoggerFactory.getLogger(RealizationSortRule.class);
+
     @Override
     public void apply(List<Candidate> candidates) {
+        StringBuilder sb = new StringBuilder();
+        for (Candidate candidate : candidates) {
+            sb.append(candidate.getRealization().getCanonicalName() + " priority " + candidate.getPriority() + " cost " + candidate.getCapability().cost + ". ");
+        }
+        logger.info(sb.toString());
+
         Collections.sort(candidates);
     }
 }


[14/50] [abbrv] kylin git commit: remove unnecessary raw measure cond

Posted by sh...@apache.org.
remove unnecessary raw measure cond


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a05f1114
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a05f1114
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a05f1114

Branch: refs/heads/KYLIN-1726
Commit: a05f11146c9edd4ba18618f38fd13c9b7e4806ce
Parents: 942406b
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 23:45:17 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 23:45:17 2016 +0800

----------------------------------------------------------------------
 .../localmeta/cube_desc/test_kylin_cube_without_slr_desc.json     | 3 ---
 .../cube_desc/test_kylin_cube_without_slr_left_join_desc.json     | 3 ---
 2 files changed, 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a05f1114/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
index 7de2ae2..d185175 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_desc.json
@@ -234,9 +234,6 @@
               "item_count_sum",
               "SITE_EXTENDED_1",
               "SITE_EXTENDED_2",
-              "CAL_DT_RAW",
-              "LSTG_FORMAT_NAME_RAW",
-              "LEAF_CATEG_ID_RAW",
               "PRICE_RAW"
             ]
           }

http://git-wip-us.apache.org/repos/asf/kylin/blob/a05f1114/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
index 4270aab..2aea1a8 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_kylin_cube_without_slr_left_join_desc.json
@@ -266,9 +266,6 @@
               "gmv_max",
               "trans_cnt",
               "item_count_sum",
-              "CAL_DT_RAW",
-              "LSTG_FORMAT_NAME_RAW",
-              "LEAF_CATEG_ID_RAW",
               "PRICE_RAW"
             ]
           }


[12/50] [abbrv] kylin git commit: KYLIN-1922 fix CI

Posted by sh...@apache.org.
KYLIN-1922 fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/466cf1af
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/466cf1af
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/466cf1af

Branch: refs/heads/KYLIN-1726
Commit: 466cf1afb19ee1acb4f99ff72ac61e7689617957
Parents: 618cf28
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 22:27:17 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 22:27:17 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITKylinQueryTest.java    | 53 ++++++++------------
 1 file changed, 21 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/466cf1af/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 0efea64..5f6af7a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -32,6 +32,7 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.query.enumerator.OLAPQuery;
@@ -40,12 +41,9 @@ import org.apache.kylin.query.routing.Candidate;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
 import org.apache.kylin.query.schema.OLAPSchemaFactory;
 import org.apache.kylin.storage.hbase.HBaseStorage;
-import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
-import org.hamcrest.BaseMatcher;
-import org.hamcrest.Description;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -122,41 +120,32 @@ public class ITKylinQueryTest extends KylinTestBase {
     @Test
     public void testTimeoutQuery() throws Exception {
 
-        thrown.expect(SQLException.class);
-
-        //should not break at table duplicate check, should fail at model duplicate check
-        thrown.expectCause(new BaseMatcher<Throwable>() {
-            @Override
-            public boolean matches(Object item) {
-                if (item instanceof GTScanSelfTerminatedException) {
-                    return true;
-                }
-                return false;
-            }
-
-            @Override
-            public void describeTo(Description description) {
-            }
-        });
+        try {
 
-        Map<String, String> toggles = Maps.newHashMap();
-        toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
-        BackdoorToggles.setToggles(toggles);
+            Map<String, String> toggles = Maps.newHashMap();
+            toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
+            BackdoorToggles.setToggles(toggles);
 
-        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.03");//set timeout to 9s
+            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.03");//set timeout to 9s
 
-        //these two cubes has RAW measure, will disturb limit push down
-        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
-        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+            //these two cubes have RAW measures, which would disturb limit push-down
+            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
-        execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout", null, true);
+            execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout", null, true);
+        } catch (SQLException e) {
+            if (!(e.getCause() instanceof GTScanSelfTerminatedException)) {
+                throw new RuntimeException();
+            }
+        } finally {
 
-        //these two cubes has RAW measure, will disturb limit push down
-        RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
-        RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+            //these two cubes have RAW measures, which would disturb limit push-down
+            RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+            RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
-        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "1");//set timeout to 9s 
-        BackdoorToggles.cleanToggles();
+            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "1");//restore the default timeout
+            BackdoorToggles.cleanToggles();
+        }
     }
 
     //don't try to ignore this test, try to clean your "temp" folder
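
The key change here is the save-and-restore shape: the timeout scenario now runs inside try/catch/finally so that BackdoorToggles, the blackout list and the timeout property are put back even when the expected exception is thrown mid-query. A self-contained, hypothetical sketch of that pattern follows; ToggleGuardSketch and its SETTINGS map merely stand in for the Kylin-specific global state.

import java.util.HashMap;
import java.util.Map;

// Hypothetical illustration of the restore-in-finally pattern used by the rewritten test.
final class ToggleGuardSketch {

    // stand-in for process-wide configuration such as KylinConfig properties or BackdoorToggles
    static final Map<String, String> SETTINGS = new HashMap<String, String>();

    static void runWithTemporaryTimeout(Runnable scenario) {
        String saved = SETTINGS.put("visit.timeout.times", "0.03");      // shrink the timeout, remember the old value
        try {
            scenario.run();                                              // expected to time out
        } catch (RuntimeException expectedTimeout) {
            // the scenario is supposed to fail with a timeout; other failures would be asserted by the caller
        } finally {
            if (saved == null) {
                SETTINGS.remove("visit.timeout.times");
            } else {
                SETTINGS.put("visit.timeout.times", saved);              // always restore the global state
            }
        }
    }
}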


[35/50] [abbrv] kylin git commit: Add Travis build

Posted by sh...@apache.org.
Add Travis build

Signed-off-by: lidongsjtu <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/dfa30485
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/dfa30485
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/dfa30485

Branch: refs/heads/KYLIN-1726
Commit: dfa304850f780ce9a546a8f60447ce3febb1c23d
Parents: ad5844b
Author: Yiming Liu <li...@gmail.com>
Authored: Tue Aug 16 09:29:09 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Sep 13 22:47:23 2016 +0800

----------------------------------------------------------------------
 .travis.yml | 16 ++++++++++++++++
 README.md   |  3 +++
 2 files changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/dfa30485/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..7f4b7de
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,16 @@
+language: java
+
+jdk:
+  - oraclejdk7
+
+#notification:
+#  email:
+#    recipients:
+#      - XXX@apache.org
+#    on_success: always
+#    on_failure: always
+
+#branches:
+#  only:
+#    - master
+

http://git-wip-us.apache.org/repos/asf/kylin/blob/dfa30485/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 0c136ef..e72bd56 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,9 @@
 Apache Kylin
 ============
 
+[![Build Status](https://travis-ci.org/apache/kylin.svg?branch=master)](https://travis-ci.org/apache/kylin)
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
 > Extreme OLAP Engine for Big Data
 
 Apache Kylin is an open source Distributed Analytics Engine, contributed by eBay Inc., provides SQL interface and multi-dimensional analysis (OLAP) on Hadoop supporting extremely large datasets.


[40/50] [abbrv] kylin git commit: KYLIN-2012 more robust approach to hive schema changes

Posted by sh...@apache.org.
KYLIN-2012 more robust approach to hive schema changes


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/17569f6c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/17569f6c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/17569f6c

Branch: refs/heads/KYLIN-1726
Commit: 17569f6c32a373f599ef7689f9506b5af5ed68bd
Parents: c48baba
Author: gaodayue <ga...@meituan.com>
Authored: Mon Sep 12 12:05:32 2016 +0800
Committer: gaodayue <ga...@meituan.com>
Committed: Wed Sep 14 15:02:33 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/CubeDescManager.java  |  62 +++---
 .../org/apache/kylin/cube/CubeInstance.java     |  11 +-
 .../java/org/apache/kylin/cube/CubeManager.java |  47 ++--
 .../org/apache/kylin/cube/model/CubeDesc.java   |  47 ++--
 .../model/validation/CubeMetadataValidator.java |  32 +--
 .../realization/RealizationStatusEnum.java      |   2 +-
 .../kylin/rest/controller/CubeController.java   |  44 ++--
 .../apache/kylin/rest/service/CacheService.java |  11 +-
 .../apache/kylin/rest/service/CubeService.java  |  15 --
 .../apache/kylin/rest/service/JobService.java   |   6 +
 .../kylin/rest/service/CubeServiceTest.java     |   1 -
 .../source/hive/HiveSourceTableLoader.java      |  33 ++-
 .../apache/kylin/source/hive/SchemaChecker.java | 216 +++++++++++++++++++
 webapp/app/css/AdminLTE.css                     |   4 +-
 webapp/app/partials/cubes/cubes.html            |  22 +-
 15 files changed, 353 insertions(+), 200 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
index 33a6830..1b1cf70 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeDescManager.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -36,6 +35,7 @@ import org.apache.kylin.cube.model.validation.CubeMetadataValidator;
 import org.apache.kylin.cube.model.validation.ValidateContext;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -110,30 +110,34 @@ public class CubeDescManager {
      * @throws IOException
      */
     public CubeDesc reloadCubeDescLocal(String name) throws IOException {
+        // Broken CubeDesc is not allowed to be saved and broadcast.
+        CubeDesc ndesc = loadCubeDesc(CubeDesc.concatResourcePath(name), false);
 
-        // Save Source
-        String path = CubeDesc.concatResourcePath(name);
-
-        // Reload the CubeDesc
-        CubeDesc ndesc = loadCubeDesc(path);
-
-        // Here replace the old one
         cubeDescMap.putLocal(ndesc.getName(), ndesc);
         Cuboid.reloadCache(name);
+
+        // if related cube is in DESCBROKEN state before, change it back to DISABLED
+        CubeManager cubeManager = CubeManager.getInstance(config);
+        for (CubeInstance cube : cubeManager.getCubesByDesc(name)) {
+            if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
+                cubeManager.reloadCubeLocal(cube.getName());
+            }
+        }
+
         return ndesc;
     }
 
-    private CubeDesc loadCubeDesc(String path) throws IOException {
+    private CubeDesc loadCubeDesc(String path, boolean allowBroken) throws IOException {
         ResourceStore store = getStore();
         CubeDesc ndesc = store.getResource(path, CubeDesc.class, CUBE_DESC_SERIALIZER);
 
-        if (StringUtils.isBlank(ndesc.getName())) {
-            throw new IllegalStateException("CubeDesc name must not be blank");
+        try {
+            ndesc.init(config, getMetadataManager().getAllTablesMap());
+        } catch (Exception e) {
+            ndesc.addError(e.getMessage());
         }
 
-        ndesc.init(config, getMetadataManager().getAllTablesMap());
-
-        if (ndesc.getError().isEmpty() == false) {
+        if (!allowBroken && !ndesc.getError().isEmpty()) {
             throw new IllegalStateException("Cube desc at " + path + " has issues: " + ndesc.getError());
         }
 
@@ -155,8 +159,8 @@ public class CubeDescManager {
 
         try {
             cubeDesc.init(config, getMetadataManager().getAllTablesMap());
-        } catch (IllegalStateException e) {
-            cubeDesc.addError(e.getMessage(), true);
+        } catch (Exception e) {
+            cubeDesc.addError(e.getMessage());
         }
         // Check base validation
         if (!cubeDesc.getError().isEmpty()) {
@@ -164,7 +168,7 @@ public class CubeDescManager {
         }
         // Semantic validation
         CubeMetadataValidator validator = new CubeMetadataValidator();
-        ValidateContext context = validator.validate(cubeDesc, true);
+        ValidateContext context = validator.validate(cubeDesc);
         if (!context.ifPass()) {
             return cubeDesc;
         }
@@ -200,14 +204,9 @@ public class CubeDescManager {
 
         List<String> paths = store.collectResourceRecursively(ResourceStore.CUBE_DESC_RESOURCE_ROOT, MetadataConstants.FILE_SURFIX);
         for (String path : paths) {
-            CubeDesc desc;
-            try {
-                desc = loadCubeDesc(path);
-            } catch (Exception e) {
-                logger.error("Error loading cube desc " + path, e);
-                continue;
-            }
-            if (path.equals(desc.getResourcePath()) == false) {
+            CubeDesc desc = loadCubeDesc(path, true);
+
+            if (!path.equals(desc.getResourcePath())) {
                 logger.error("Skip suspicious desc at " + path + ", " + desc + " should be at " + desc.getResourcePath());
                 continue;
             }
@@ -219,7 +218,7 @@ public class CubeDescManager {
             cubeDescMap.putLocal(desc.getName(), desc);
         }
 
-        logger.debug("Loaded " + cubeDescMap.size() + " Cube(s)");
+        logger.info("Loaded " + cubeDescMap.size() + " Cube(s)");
     }
 
     /**
@@ -241,17 +240,14 @@ public class CubeDescManager {
 
         try {
             desc.init(config, getMetadataManager().getAllTablesMap());
-        } catch (IllegalStateException e) {
-            desc.addError(e.getMessage(), true);
-            return desc;
-        } catch (IllegalArgumentException e) {
-            desc.addError(e.getMessage(), true);
+        } catch (Exception e) {
+            desc.addError(e.getMessage());
             return desc;
         }
 
         // Semantic validation
         CubeMetadataValidator validator = new CubeMetadataValidator();
-        ValidateContext context = validator.validate(desc, true);
+        ValidateContext context = validator.validate(desc);
         if (!context.ifPass()) {
             return desc;
         }
@@ -263,7 +259,7 @@ public class CubeDescManager {
         getStore().putResource(path, desc, CUBE_DESC_SERIALIZER);
 
         // Reload the CubeDesc
-        CubeDesc ndesc = loadCubeDesc(path);
+        CubeDesc ndesc = loadCubeDesc(path, false);
         // Here replace the old one
         cubeDescMap.put(ndesc.getName(), desc);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 851b016..a2ed051 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -35,17 +35,17 @@ import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.CapabilityResult;
+import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.metadata.realization.SQLDigest;
-import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonManagedReference;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
 
@@ -149,6 +149,13 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         return getStatus() == RealizationStatusEnum.READY;
     }
 
+    // if cube is not online and has no data or any building job, we allow its descriptor to be
+    // in a temporary broken state, so that user can edit and fix it. Broken state is often due to
+    // schema changes at source.
+    public boolean allowBrokenDescriptor() {
+        return (getStatus() == RealizationStatusEnum.DISABLED  || getStatus() == RealizationStatusEnum.DESCBROKEN) && segments.isEmpty();
+    }
+
     public String getResourcePath() {
         return concatResourcePath(name);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 2ebf5d3..daeca0d 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -18,6 +18,9 @@
 
 package org.apache.kylin.cube;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -30,8 +33,6 @@ import java.util.Random;
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 
-import javax.annotation.Nullable;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigExt;
@@ -65,8 +66,6 @@ import org.apache.kylin.source.SourceFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Multimap;
@@ -843,39 +842,37 @@ public class CubeManager implements IRealizationProvider {
 
     private synchronized CubeInstance reloadCubeLocalAt(String path) {
         ResourceStore store = getStore();
+        CubeInstance cube;
 
-        CubeInstance cubeInstance;
         try {
-            cubeInstance = store.getResource(path, CubeInstance.class, CUBE_SERIALIZER);
+            cube = store.getResource(path, CubeInstance.class, CUBE_SERIALIZER);
+            checkNotNull(cube, "cube (at %s) not found", path);
 
-            CubeDesc cubeDesc = CubeDescManager.getInstance(config).getCubeDesc(cubeInstance.getDescName());
-            if (cubeDesc == null)
-                throw new IllegalStateException("CubeInstance desc not found '" + cubeInstance.getDescName() + "', at " + path);
+            String cubeName = cube.getName();
+            checkState(StringUtils.isNotBlank(cubeName), "cube (at %s) name must not be blank", path);
 
-            cubeInstance.setConfig((KylinConfigExt) cubeDesc.getConfig());
+            CubeDesc cubeDesc = CubeDescManager.getInstance(config).getCubeDesc(cube.getDescName());
+            checkNotNull(cubeDesc, "cube descriptor '%s' (for cube '%s') not found", cube.getDescName(), cubeName);
 
-            if (StringUtils.isBlank(cubeInstance.getName()))
-                throw new IllegalStateException("CubeInstance name must not be blank, at " + path);
+            if (!cubeDesc.getError().isEmpty()) {
+                cube.setStatus(RealizationStatusEnum.DESCBROKEN);
+                logger.warn("cube descriptor {} (for cube '{}') is broken", cubeDesc.getResourcePath(), cubeName);
 
-            if (cubeInstance.getDescriptor() == null)
-                throw new IllegalStateException("CubeInstance desc not found '" + cubeInstance.getDescName() + "', at " + path);
+            } else if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
+                cube.setStatus(RealizationStatusEnum.DISABLED);
+                logger.info("cube {} changed from DESCBROKEN to DISABLED", cubeName);
+            }
 
-            final String cubeName = cubeInstance.getName();
-            cubeMap.putLocal(cubeName, cubeInstance);
+            cube.setConfig((KylinConfigExt) cubeDesc.getConfig());
+            cubeMap.putLocal(cubeName, cube);
 
-            for (CubeSegment segment : cubeInstance.getSegments()) {
+            for (CubeSegment segment : cube.getSegments()) {
                 usedStorageLocation.put(cubeName.toUpperCase(), segment.getStorageLocationIdentifier());
             }
 
-            logger.debug("Reloaded new cube: " + cubeName + " with reference being" + cubeInstance + " having " + cubeInstance.getSegments().size() + " segments:" + StringUtils.join(Collections2.transform(cubeInstance.getSegments(), new Function<CubeSegment, String>() {
-                @Nullable
-                @Override
-                public String apply(CubeSegment input) {
-                    return input.getStorageLocationIdentifier();
-                }
-            }), ","));
+            logger.info("Reloaded cube {} being {} having {} segments", cubeName, cube, cube.getSegments().size());
+            return cube;
 
-            return cubeInstance;
         } catch (Exception e) {
             logger.error("Error during load cube instance, skipping : " + path, e);
             return null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index e6b3d3f..4195451 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.cube.model;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -29,9 +33,9 @@ import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.Map.Entry;
 
 import javax.annotation.Nullable;
 
@@ -64,9 +68,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.common.base.Function;
 import com.google.common.collect.Collections2;
@@ -526,19 +530,15 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         this.errors.clear();
         this.config = KylinConfigExt.createInstance(config, overrideKylinProps);
 
-        if (this.modelName == null || this.modelName.length() == 0) {
-            this.addError("The cubeDesc '" + this.getName() + "' doesn't have data model specified.");
-        }
+        checkArgument(StringUtils.isNotBlank(name), "CubeDesc name is blank");
+        checkArgument(StringUtils.isNotBlank(modelName), "CubeDesc(%s) has blank modelName", name);
+
+        this.model = MetadataManager.getInstance(config).getDataModelDesc(modelName);
+        checkNotNull(this.model, "DataModelDesc(%s) not found", modelName);
 
         // check if aggregation group is valid
         validate();
 
-        this.model = MetadataManager.getInstance(config).getDataModelDesc(this.modelName);
-
-        if (this.model == null) {
-            this.addError("No data model found with name '" + modelName + "'.");
-        }
-
         for (DimensionDesc dim : dimensions) {
             dim.init(this, tables);
         }
@@ -559,9 +559,9 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         // check all dimension columns are presented on rowkey
         List<TblColRef> dimCols = listDimensionColumnsExcludingDerived(true);
-        if (rowkey.getRowKeyColumns().length != dimCols.size()) {
-            addError("RowKey columns count (" + rowkey.getRowKeyColumns().length + ") does not match dimension columns count (" + dimCols.size() + "). ");
-        }
+        checkState(rowkey.getRowKeyColumns().length == dimCols.size(),
+                "RowKey columns count (%d) doesn't match dimensions columns count (%d)",
+                rowkey.getRowKeyColumns().length, dimCols.size());
 
         initDictionaryDesc();
     }
@@ -954,25 +954,8 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         this.autoMergeTimeRanges = autoMergeTimeRanges;
     }
 
-    /**
-     * Add error info and thrown exception out
-     *
-     * @param message
-     */
     public void addError(String message) {
-        addError(message, false);
-    }
-
-    /**
-     * @param message error message
-     * @param silent  if throw exception
-     */
-    public void addError(String message, boolean silent) {
-        if (!silent) {
-            throw new IllegalStateException(message);
-        } else {
-            this.errors.add(message);
-        }
+        this.errors.add(message);
     }
 
     public List<String> getError() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
index c631f8d..c22930a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/CubeMetadataValidator.java
@@ -35,39 +35,15 @@ public class CubeMetadataValidator {
     private IValidatorRule<CubeDesc>[] rules = new IValidatorRule[] { new FunctionRule(), new AggregationGroupRule(), new RowKeyAttrRule(), new DictionaryRule() };
 
     public ValidateContext validate(CubeDesc cube) {
-        return validate(cube, false);
-    }
-
-    /**
-     * @param inject    inject error into cube desc
-     * @return
-     */
-    public ValidateContext validate(CubeDesc cube, boolean inject) {
         ValidateContext context = new ValidateContext();
-        for (int i = 0; i < rules.length; i++) {
-            IValidatorRule<CubeDesc> rule = rules[i];
+        for (IValidatorRule<CubeDesc> rule : rules) {
             rule.validate(cube, context);
         }
-        if (inject) {
-            injectResult(cube, context);
-        }
-        return context;
-    }
 
-    /**
-     * 
-     * Inject errors info into cubeDesc
-     * 
-     * @param cubeDesc
-     * @param context
-     */
-    public void injectResult(CubeDesc cubeDesc, ValidateContext context) {
-        ValidateContext.Result[] results = context.getResults();
-        for (int i = 0; i < results.length; i++) {
-            ValidateContext.Result result = results[i];
-            cubeDesc.addError(result.getLevel() + " : " + result.getMessage(), true);
+        for (ValidateContext.Result result : context.getResults()) {
+            cube.addError(result.getLevel() + " : " + result.getMessage());
         }
-
+        return context;
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
index e4583f2..27e2d57 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/RealizationStatusEnum.java
@@ -20,6 +20,6 @@ package org.apache.kylin.metadata.realization;
 
 public enum RealizationStatusEnum {
 
-    DISABLED, BUILDING, READY, DESCBROKEN
+    DISABLED, READY, DESCBROKEN
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 5397df7..42b117c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -43,6 +42,7 @@ import org.apache.kylin.job.JoinedFlatTable;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -74,6 +74,7 @@ import org.springframework.web.bind.annotation.ResponseBody;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.google.common.base.Joiner;
 import com.google.common.collect.Sets;
 
 /**
@@ -345,8 +346,12 @@ public class CubeController extends BasicController {
 
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
         if (cube == null) {
-            throw new InternalErrorException("Cannot find cube " + cubeName);
+            throw new BadRequestException("Cannot find cube " + cubeName);
+        }
+        if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
+            throw new BadRequestException("Broken cube can't be cloned");
         }
+
         CubeDesc cubeDesc = cube.getDescriptor();
         CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
         newCubeDesc.setName(newCubeName);
@@ -446,18 +451,11 @@ public class CubeController extends BasicController {
     @ResponseBody
     public CubeRequest updateCubeDesc(@RequestBody CubeRequest cubeRequest) throws JsonProcessingException {
 
-        //update cube
         CubeDesc desc = deserializeCubeDesc(cubeRequest);
-        CubeDesc oldCubeDesc;
-        boolean isCubeDescFreeEditable;
-
         if (desc == null) {
             return cubeRequest;
         }
 
-        // Check if the cube is editable
-        isCubeDescFreeEditable = cubeService.isCubeDescFreeEditable(desc);
-
         String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : cubeRequest.getProject();
         try {
             CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
@@ -475,14 +473,14 @@ public class CubeController extends BasicController {
                 return cubeRequest;
             }
 
-            oldCubeDesc = cube.getDescriptor();
-            if (isCubeDescFreeEditable || oldCubeDesc.consistentWith(desc)) {
-                desc = cubeService.updateCubeAndDesc(cube, desc, projectName, true);
-            } else {
-                logger.warn("Won't update the cube desc due to inconsistency");
-                updateRequest(cubeRequest, false, "CubeDesc " + desc.getName() + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.");
+            if (cube.getSegments().size() != 0 && !cube.getDescriptor().consistentWith(desc)) {
+                String error = "CubeDesc " + desc.getName() + " is inconsistent with existing. Try purge that cube first or avoid updating key cube desc fields.";
+                updateRequest(cubeRequest, false, error);
                 return cubeRequest;
             }
+
+            desc = cubeService.updateCubeAndDesc(cube, desc, projectName, true);
+
         } catch (AccessDeniedException accessDeniedException) {
             throw new ForbiddenException("You don't have right to update this cube.");
         } catch (Exception e) {
@@ -491,8 +489,7 @@ public class CubeController extends BasicController {
         }
 
         if (!desc.getError().isEmpty()) {
-            logger.warn("Cube " + desc.getName() + " fail to update because " + desc.getError());
-            updateRequest(cubeRequest, false, omitMessage(desc.getError()));
+            updateRequest(cubeRequest, false, Joiner.on("\n").join(desc.getError()));
             return cubeRequest;
         }
 
@@ -599,19 +596,6 @@ public class CubeController extends BasicController {
         return desc;
     }
 
-    /**
-     * @return
-     */
-    private String omitMessage(List<String> errors) {
-        StringBuffer buffer = new StringBuffer();
-        for (Iterator<String> iterator = errors.iterator(); iterator.hasNext();) {
-            String string = (String) iterator.next();
-            buffer.append(string);
-            buffer.append("\n");
-        }
-        return buffer.toString();
-    }
-
     private void updateRequest(CubeRequest request, boolean success, String message) {
         request.setCubeDescData("");
         request.setSuccessful(success);

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
index 2160e3d..9d134d6 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
@@ -189,6 +189,7 @@ public class CacheService extends BasicService {
             case TABLE:
                 getMetadataManager().reloadTableCache(cacheKey);
                 CubeDescManager.clearCache();
+                clearRealizationCache();
                 break;
             case EXTERNAL_FILTER:
                 getMetadataManager().reloadExtFilter(cacheKey);
@@ -202,9 +203,7 @@ public class CacheService extends BasicService {
                 DictionaryManager.clearCache();
                 MetadataManager.clearCache();
                 CubeDescManager.clearCache();
-                CubeManager.clearCache();
-                HybridManager.clearCache();
-                RealizationRegistry.clearCache();
+                clearRealizationCache();
                 Cuboid.clearCache();
                 ProjectManager.clearCache();
                 KafkaConfigManager.clearCache();
@@ -222,6 +221,12 @@ public class CacheService extends BasicService {
         }
     }
 
+    private void clearRealizationCache() {
+        CubeManager.clearCache();
+        HybridManager.clearCache();
+        RealizationRegistry.clearCache();
+    }
+
     private void rebuildCubeCache(String cubeName) {
         CubeInstance cube = getCubeManager().reloadCubeLocal(cubeName);
         getHybridManager().reloadHybridInstanceByChild(RealizationType.CUBE, cubeName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 4cd527c..e446045 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -273,21 +273,6 @@ public class CubeService extends BasicService {
         accessService.clean(cube, true);
     }
 
-    public boolean isCubeDescFreeEditable(CubeDesc cd) {
-        List<CubeInstance> cubes = getCubeManager().getCubesByDesc(cd.getName());
-        for (CubeInstance cube : cubes) {
-            if (cube.getSegments().size() != 0) {
-                logger.debug("cube '" + cube.getName() + " has " + cube.getSegments().size() + " segments, couldn't edit cube desc.");
-                return false;
-            }
-        }
-        return true;
-    }
-
-    public static String getCubeDescNameFromCube(String cubeName) {
-        return cubeName + DESC_SUFFIX;
-    }
-
     public static String getCubeNameFromDesc(String descName) {
         if (descName.toLowerCase().endsWith(DESC_SUFFIX)) {
             return descName.substring(0, descName.toLowerCase().indexOf(DESC_SUFFIX));

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index e4fbc98..5c704ba 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -48,7 +48,9 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.execution.Output;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.exception.BadRequestException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -199,6 +201,10 @@ public class JobService extends BasicService {
     public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, //
             CubeBuildTypeEnum buildType, boolean force, String submitter) throws IOException, JobException {
 
+        if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
+            throw new BadRequestException("Broken cube " + cube.getName() + " can't be built");
+        }
+
         checkCubeDescSignature(cube);
         checkNoRunningJob(cube);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
index f98d6b9..59e96d6 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CubeServiceTest.java
@@ -51,6 +51,5 @@ public class CubeServiceTest extends ServiceTestBase {
         List<CubeInstance> cubes = cubeService.listAllCubes(null, null, null);
         Assert.assertNotNull(cubes);
         CubeInstance cube = cubes.get(0);
-        cubeService.isCubeDescFreeEditable(cube.getDescriptor());
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index 70b097c..8b98e7b 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -28,6 +28,7 @@ import java.util.UUID;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.MetadataManager;
@@ -36,8 +37,10 @@ import org.apache.kylin.metadata.model.TableDesc;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.LinkedHashMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.google.common.collect.SetMultimap;
 import com.google.common.collect.Sets;
 
 /**
@@ -51,27 +54,25 @@ public class HiveSourceTableLoader {
     @SuppressWarnings("unused")
     private static final Logger logger = LoggerFactory.getLogger(HiveSourceTableLoader.class);
 
-    public static final String OUTPUT_SURFIX = "json";
-    public static final String TABLE_FOLDER_NAME = "table";
-    public static final String TABLE_EXD_FOLDER_NAME = "table_exd";
-
     public static Set<String> reloadHiveTables(String[] hiveTables, KylinConfig config) throws IOException {
 
-        Map<String, Set<String>> db2tables = Maps.newHashMap();
-        for (String table : hiveTables) {
-            String[] parts = HadoopUtil.parseHiveTableName(table);
-            Set<String> set = db2tables.get(parts[0]);
-            if (set == null) {
-                set = Sets.newHashSet();
-                db2tables.put(parts[0], set);
-            }
-            set.add(parts[1]);
+        SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
+        for (String fullTableName : hiveTables) {
+            String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
+            db2tables.put(parts[0], parts[1]);
+        }
+
+        HiveClient hiveClient = new HiveClient();
+        SchemaChecker checker = new SchemaChecker(hiveClient, MetadataManager.getInstance(config), CubeManager.getInstance(config));
+        for (Map.Entry<String, String> entry : db2tables.entries()) {
+            SchemaChecker.CheckResult result = checker.allowReload(entry.getKey(), entry.getValue());
+            result.raiseExceptionWhenInvalid();
         }
 
         // extract from hive
         Set<String> loadedTables = Sets.newHashSet();
         for (String database : db2tables.keySet()) {
-            List<String> loaded = extractHiveTables(database, db2tables.get(database), config);
+            List<String> loaded = extractHiveTables(database, db2tables.get(database), hiveClient);
             loadedTables.addAll(loaded);
         }
 
@@ -84,13 +85,12 @@ public class HiveSourceTableLoader {
         metaMgr.removeTableExd(hiveTable);
     }
 
-    private static List<String> extractHiveTables(String database, Set<String> tables, KylinConfig config) throws IOException {
+    private static List<String> extractHiveTables(String database, Set<String> tables, HiveClient hiveClient) throws IOException {
 
         List<String> loadedTables = Lists.newArrayList();
         MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
         for (String tableName : tables) {
             Table table = null;
-            HiveClient hiveClient = new HiveClient();
             List<FieldSchema> partitionFields = null;
             List<FieldSchema> fields = null;
             try {
@@ -167,5 +167,4 @@ public class HiveSourceTableLoader {
 
         return loadedTables;
     }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java b/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
new file mode 100644
index 0000000..3b03551
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
@@ -0,0 +1,216 @@
+package org.apache.kylin.source.hive;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static java.lang.String.format;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.annotation.Nullable;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.metadata.MetadataManager;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+public class SchemaChecker {
+    private final HiveClient hiveClient;
+    private final MetadataManager metadataManager;
+    private final CubeManager cubeManager;
+
+    static class CheckResult {
+        private final boolean valid;
+        private final String reason;
+
+        private CheckResult(boolean valid, String reason) {
+            this.valid = valid;
+            this.reason = reason;
+        }
+
+        void raiseExceptionWhenInvalid() {
+            if (!valid) {
+                throw new RuntimeException(reason);
+            }
+        }
+
+        static CheckResult validOnFirstLoad(String tableName) {
+            return new CheckResult(true, format("Table '%s' hasn't been loaded before", tableName));
+        }
+
+        static CheckResult validOnCompatibleSchema(String tableName) {
+            return new CheckResult(true, format("Table '%s' is compatible with all existing cubes", tableName));
+        }
+
+        static CheckResult invalidOnFetchSchema(String tableName, Exception e) {
+            return new CheckResult(false, format("Failed to fetch metadata of '%s': %s", tableName, e.getMessage()));
+        }
+
+        static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> reasons) {
+            StringBuilder buf = new StringBuilder();
+            for (String reason : reasons) {
+                buf.append("- ").append(reason).append("\n");
+            }
+
+            return new CheckResult(false, format("Found %d issue(s) with '%s':\n%s Please disable and purge related cube(s) first", reasons.size(), tableName, buf.toString()));
+        }
+    }
+
+    SchemaChecker(HiveClient hiveClient, MetadataManager metadataManager, CubeManager cubeManager) {
+        this.hiveClient = checkNotNull(hiveClient, "hiveClient is null");
+        this.metadataManager = checkNotNull(metadataManager, "metadataManager is null");
+        this.cubeManager = checkNotNull(cubeManager, "cubeManager is null");
+    }
+
+    private List<FieldSchema> fetchSchema(String dbName, String tblName) throws Exception {
+        List<FieldSchema> fields = Lists.newArrayList();
+        fields.addAll(hiveClient.getHiveTableFields(dbName, tblName));
+
+        Table table = hiveClient.getHiveTable(dbName, tblName);
+        List<FieldSchema> partitionFields = table.getPartitionKeys();
+        if (partitionFields != null) {
+            fields.addAll(partitionFields);
+        }
+
+        return fields;
+    }
+
+    private List<CubeInstance> findCubeByTable(final String fullTableName) {
+        Iterable<CubeInstance> relatedCubes = Iterables.filter(cubeManager.listAllCubes(), new Predicate<CubeInstance>() {
+            @Override
+            public boolean apply(@Nullable CubeInstance cube) {
+                if (cube == null || cube.allowBrokenDescriptor()) {
+                    return false;
+                }
+                CubeDesc desc = cube.getDescriptor();
+
+                Set<String> usedTables = Sets.newHashSet();
+                usedTables.add(desc.getFactTableDesc().getIdentity());
+                for (TableDesc lookup : desc.getLookupTableDescs()) {
+                    usedTables.add(lookup.getIdentity());
+                }
+
+                return usedTables.contains(fullTableName);
+            }
+        });
+
+        return ImmutableList.copyOf(relatedCubes);
+    }
+
+    private boolean isColumnCompatible(ColumnDesc column, FieldSchema field) {
+        if (!column.getName().equalsIgnoreCase(field.getName())) {
+            return false;
+        }
+
+        String typeStr = field.getType();
+        // kylin uses double internally for float, see HiveSourceTableLoader.java
+        // TODO should this normalization to be in DataType class ?
+        if ("float".equalsIgnoreCase(typeStr)) {
+            typeStr = "double";
+        }
+        DataType fieldType = DataType.getType(typeStr);
+
+        if (column.getType().isIntegerFamily()) {
+            // OLAPTable.listSourceColumns converts some integer columns to bigint,
+            // therefore strict type comparison won't work.
+            // changing from one integer type to another should be fine.
+            return fieldType.isIntegerFamily();
+        } else {
+            // only compare base type name, changing precision or scale should be fine
+            return column.getTypeName().equals(fieldType.getName());
+        }
+    }
+
+    private List<String> checkAllUsedColumns(CubeInstance cube, TableDesc table, Map<String, FieldSchema> fieldsMap) {
+        Set<ColumnDesc> usedColumns = Sets.newHashSet();
+        for (TblColRef col : cube.getAllColumns()) {
+            usedColumns.add(col.getColumnDesc());
+        }
+
+        List<String> violateColumns = Lists.newArrayList();
+        for (ColumnDesc column : table.getColumns()) {
+            if (usedColumns.contains(column)) {
+                FieldSchema field = fieldsMap.get(column.getName());
+                if (field == null || !isColumnCompatible(column, field)) {
+                    violateColumns.add(column.getName());
+                }
+            }
+        }
+        return violateColumns;
+    }
+
+    private boolean checkAllColumns(TableDesc table, List<FieldSchema> fields) {
+        if (table.getColumnCount() != fields.size()) {
+            return false;
+        }
+
+        ColumnDesc[] columns = table.getColumns();
+        for (int i = 0; i < columns.length; i++) {
+            if (!isColumnCompatible(columns[i], fields.get(i))) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public CheckResult allowReload(String dbName, String tblName) {
+        final String fullTableName = (dbName + "." + tblName).toUpperCase();
+
+        TableDesc existing = metadataManager.getTableDesc(fullTableName);
+        if (existing == null) {
+            return CheckResult.validOnFirstLoad(fullTableName);
+        }
+
+        List<FieldSchema> currentFields;
+        Map<String, FieldSchema> currentFieldsMap = Maps.newHashMap();
+        try {
+            currentFields = fetchSchema(dbName, tblName);
+        } catch (Exception e) {
+            return CheckResult.invalidOnFetchSchema(fullTableName, e);
+        }
+        for (FieldSchema field : currentFields) {
+            currentFieldsMap.put(field.getName().toUpperCase(), field);
+        }
+
+        List<String> issues = Lists.newArrayList();
+        for (CubeInstance cube : findCubeByTable(fullTableName)) {
+            TableDesc factTable = cube.getFactTableDesc();
+            List<TableDesc> lookupTables = cube.getDescriptor().getLookupTableDescs();
+            String modelName = cube.getDataModelDesc().getName();
+
+            // if user reloads a fact table used by cube, then all used columns
+            // must match current schema
+            if (factTable.getIdentity().equals(fullTableName)) {
+                List<String> violateColumns = checkAllUsedColumns(cube, factTable, currentFieldsMap);
+                if (!violateColumns.isEmpty()) {
+                    issues.add(format("Column %s used in cube[%s] and model[%s], but changed in hive", violateColumns, cube.getName(), modelName));
+                }
+            }
+
+            // if user reloads a lookup table used by cube, then nearly all changes in schema are disallowed
+            for (TableDesc lookupTable : lookupTables) {
+                if (lookupTable.getIdentity().equals(fullTableName) && !checkAllColumns(lookupTable, currentFields)) {
+                    issues.add(format("Table '%s' is used as Lookup Table in cube[%s] and model[%s], but changed in hive", lookupTable.getIdentity(), cube.getName(), modelName));
+                }
+            }
+        }
+
+        if (issues.isEmpty()) {
+            return CheckResult.validOnCompatibleSchema(fullTableName);
+        }
+        return CheckResult.invalidOnIncompatibleSchema(fullTableName, issues);
+    }
+}
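
The entry point of the new checker is allowReload(db, table): it returns a CheckResult that either passes or carries a reason describing why the changed Hive schema would break an existing cube, and HiveSourceTableLoader (hunk above) raises that reason as an exception before any table metadata is overwritten. A minimal usage sketch, assuming the caller sits in the same package (the constructor and raiseExceptionWhenInvalid() are package-private); the class and method names of the sketch itself are illustrative:

    package org.apache.kylin.source.hive;   // same package: SchemaChecker's constructor is package-private

    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.cube.CubeManager;
    import org.apache.kylin.metadata.MetadataManager;

    class SchemaCheckSketch {
        // Mirrors how HiveSourceTableLoader.reloadHiveTables consults SchemaChecker
        // before reloading a table definition from Hive; illustrative only.
        static void checkBeforeReload(KylinConfig config, String db, String table) {
            HiveClient hiveClient = new HiveClient();
            SchemaChecker checker = new SchemaChecker(hiveClient, MetadataManager.getInstance(config), CubeManager.getInstance(config));
            // throws a RuntimeException listing the changed columns/tables that would break
            // an existing cube; when the result is valid, the reload can proceed as before
            checker.allowReload(db, table).raiseExceptionWhenInvalid();
        }
    }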

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/webapp/app/css/AdminLTE.css
----------------------------------------------------------------------
diff --git a/webapp/app/css/AdminLTE.css b/webapp/app/css/AdminLTE.css
index e772ae5..6688457 100644
--- a/webapp/app/css/AdminLTE.css
+++ b/webapp/app/css/AdminLTE.css
@@ -4307,7 +4307,7 @@ fieldset[disabled] .btn-vk.active {
 .alert-info,
 .label-danger,
 .label-info,
-.label-waring,
+.label-warning,
 .label-primary,
 .label-success,
 .modal-primary .modal-body,
@@ -4349,7 +4349,7 @@ fieldset[disabled] .btn-vk.active {
 .bg-yellow,
 .callout.callout-warning,
 .alert-warning,
-.label-waring,
+.label-warning,
 .modal-warning .modal-body {
   background-color: #f39c12 !important;
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/17569f6c/webapp/app/partials/cubes/cubes.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubes/cubes.html b/webapp/app/partials/cubes/cubes.html
index 30981dc..de9b99b 100644
--- a/webapp/app/partials/cubes/cubes.html
+++ b/webapp/app/partials/cubes/cubes.html
@@ -67,7 +67,7 @@
             </td>
             <td>
                 <span class="label"
-                      ng-class="{'label-success': cube.status=='READY', 'label-default': cube.status=='DISABLED'}">
+                      ng-class="{'label-success': cube.status=='READY', 'label-default': cube.status=='DISABLED', 'label-warning': cube.status=='DESCBROKEN'}">
                     {{ cube.status}}
                 </span>
             </td>
@@ -89,18 +89,18 @@
                         Action <span class="ace-icon fa fa-caret-down icon-on-right"></span>
                     </button>
                     <ul class="dropdown-menu" role="menu">
-                        <li ng-if="cube.status=='DISABLED' && userService.hasRole('ROLE_ADMIN') ">
+                        <li ng-if="cube.status!='READY' && userService.hasRole('ROLE_ADMIN') ">
                             <a ng-click="dropCube(cube)" tooltip="Drop the cube, related jobs and data permanently.">Drop</a></li>
-                        <li ng-if="cube.status=='DISABLED' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))">
+                        <li ng-if="cube.status!='READY' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))">
                             <a ng-click="cubeEdit(cube);">Edit</a></li>
-                      <li ng-if="cube.streaming && cube.status=='DISABLED' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))"></li>
-                        <li><a ng-click="startJobSubmit(cube);">Build</a></li>
-                        <li><a ng-click="startRefresh(cube)">Refresh</a></li>
-                        <li><a ng-click="startMerge(cube)">Merge</a></li>
-                        <li ng-if="cube.status!='DISABLED'"><a ng-click="disable(cube)">Disable</a></li>
+                        <li ng-if="cube.streaming && cube.status=='DISABLED' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))"></li>
+                        <li ng-if="cube.status!='DESCBROKEN'"><a ng-click="startJobSubmit(cube);">Build</a></li>
+                        <li ng-if="cube.status!='DESCBROKEN'"><a ng-click="startRefresh(cube)">Refresh</a></li>
+                        <li ng-if="cube.status!='DESCBROKEN'"><a ng-click="startMerge(cube)">Merge</a></li>
+                        <li ng-if="cube.status=='READY'"><a ng-click="disable(cube)">Disable</a></li>
                         <li ng-if="cube.status=='DISABLED'"><a ng-click="enable(cube)">Enable</a></li>
                         <li ng-if="cube.status=='DISABLED'"><a ng-click="purge(cube)">Purge</a></li>
-                        <li><a ng-click="cloneCube(cube)">Clone</a></li>
+                        <li ng-if="cube.status!='DESCBROKEN'"><a ng-click="cloneCube(cube)">Clone</a></li>
 
                     </ul>
                 </div>
@@ -114,8 +114,8 @@
                         Action <span class="ace-icon fa fa-caret-down icon-on-right"></span>
                     </button>
                     <ul class="dropdown-menu" role="menu">
-                        <li ng-if="cube.status=='DISABLED'"><a href="cubes/edit/{{cube.name}}/descriptionjson">Edit CubeDesc</a></li>
-                      <li ng-if="cube.status=='DISABLED'"><a href="cubes/view/{{cube.name}}/instancejson">View Cube</a></li>
+                        <li ng-if="cube.status!='READY'"><a href="cubes/edit/{{cube.name}}/descriptionjson">Edit CubeDesc</a></li>
+                        <li ng-if="cube.status!='READY'"><a href="cubes/view/{{cube.name}}/instancejson">View Cube</a></li>
                     </ul>
                 </div>
             </td>


[19/50] [abbrv] kylin git commit: minor update hcatalog folder for emr

Posted by sh...@apache.org.
minor update hcatalog folder for emr

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3450c0f9
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3450c0f9
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3450c0f9

Branch: refs/heads/KYLIN-1726
Commit: 3450c0f984933d40cc60a604813a310accdfaded
Parents: 56136ed
Author: shaofengshi <sh...@apache.org>
Authored: Sat Sep 10 17:52:56 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Sep 10 18:00:08 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3450c0f9/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 539a45b..71c2fe6 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -86,9 +86,9 @@ then
       hcatalog_home=${hadoop_home}/hive/hcatalog
     elif [ -d "${hive_home}/hcatalog" ]; then
       hcatalog_home=${hive_home}/hcatalog
-    elif [ -n is_aws ] && [ -d "/usr/lib/oozie/lib" ]; then
-      # special handling for Amazon EMR, where hcat libs are under oozie!?
-      hcatalog_home=/usr/lib/oozie/lib
+    elif [ -n is_aws ] && [ -d "/usr/lib/hive-hcatalog" ]; then
+      # special handling for Amazon EMR
+      hcatalog_home=/usr/lib/hive-hcatalog
     else 
       echo "Couldn't locate hcatalog installation, please make sure it is installed and set HCAT_HOME to the path."
       exit 1


[27/50] [abbrv] kylin git commit: KYLIN-1922 refactors

Posted by sh...@apache.org.
KYLIN-1922 refactors


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4e8ed97d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4e8ed97d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4e8ed97d

Branch: refs/heads/KYLIN-1726
Commit: 4e8ed97d12c53c19d09f736be3baaa9112dcf413
Parents: a201c5b
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon Sep 12 23:52:43 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Sep 12 23:53:48 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/CubeInstance.java     |  10 +-
 .../org/apache/kylin/cube/model/CubeDesc.java   |   3 +-
 .../apache/kylin/gridtable/GTScanRequest.java   |   4 +-
 .../metadata/realization/IRealization.java      |   1 +
 .../apache/kylin/storage/StorageContext.java    |  34 ++++-
 .../storage/gtrecord/CubeScanRangePlanner.java  |   6 +-
 .../gtrecord/GTCubeStorageQueryBase.java        |  10 +-
 .../gtrecord/SequentialCubeTupleIterator.java   |   7 +-
 .../gtrecord/StorageResponseGTScatter.java      | 117 +++++++++++++++
 .../kylin/storage/hybrid/HybridInstance.java    |   5 +
 .../apache/kylin/query/ITCombinationTest.java   |   6 +-
 .../apache/kylin/query/ITKylinQueryTest.java    | 150 +++++++------------
 .../org/apache/kylin/query/KylinTestBase.java   |  56 +++++++
 .../src/test/resources/query/sql/query45.sql    |  23 +++
 .../test/resources/query/sql_limit/query01.sql  |  21 +++
 .../test/resources/query/sql_limit/query02.sql  |  24 +++
 .../query/sql_optimize/enable-limit01.sql       |  19 ---
 .../resources/query/sql_timeout/query02.sql     |  19 +++
 .../rules/RemoveBlackoutRealizationsRule.java   |  11 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |   3 +-
 .../hbase/cube/v2/ExpectedSizeIterator.java     |   2 +-
 .../storage/hbase/cube/v2/GTBlobScatter.java    | 150 -------------------
 .../coprocessor/endpoint/CubeVisitService.java  |   1 +
 23 files changed, 393 insertions(+), 289 deletions(-)
----------------------------------------------------------------------
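
Two threads run through this refactor: limit push-down becomes a per-realization capability (IRealization.supportsLimitPushDown(), with CubeDesc now allowing every storage type except ID_HBASE and ID_HYBRID), and StorageContext gains a finalPushDownLimit that defaults to Integer.MAX_VALUE, meaning no push-down. A minimal, hypothetical call-site sketch of how the two pieces fit together; the helper below is illustrative and not part of the commit, only the methods it calls appear in the hunks:

    import org.apache.kylin.metadata.realization.IRealization;
    import org.apache.kylin.storage.StorageContext;

    class LimitPushDownSketch {
        // Illustrative helper: ask the realization whether limit push-down is supported
        // and, if so, let StorageContext derive the final limit to send to storage.
        static int resolvePushDownLimit(IRealization realization, StorageContext context) {
            if (realization.supportsLimitPushDown()) {
                context.setFinalPushDownLimit(realization); // added in this commit
            }
            return context.getFinalPushDownLimit(); // Integer.MAX_VALUE means no limit is pushed down
        }
    }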


http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 151e142..851b016 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -35,17 +35,17 @@ import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.CapabilityResult;
-import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.metadata.realization.SQLDigest;
+import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonManagedReference;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
 import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
 
@@ -305,7 +305,6 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
 
         return result;
     }
-    
 
     public CubeSegment getSegment(String name, SegmentStatusEnum status) {
         for (CubeSegment segment : segments) {
@@ -404,6 +403,11 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         return endTime;
     }
 
+    @Override
+    public boolean supportsLimitPushDown() {
+        return getDescriptor().supportsLimitPushDown();
+    }
+
     public int getRowKeyColumnCount() {
         return getDescriptor().getRowkey().getRowKeyColumns().length;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index 2d9945a..e6b3d3f 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -992,8 +992,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     }
 
     public boolean supportsLimitPushDown() {
-        //currently only ID_SHARDED_HBASE supports limit push down
-        return getStorageType() == IStorageAware.ID_SHARDED_HBASE;
+        return getStorageType() != IStorageAware.ID_HBASE && getStorageType() != IStorageAware.ID_HYBRID;
     }
 
     public int getStorageType() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 4f68806..dc90ed6 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -42,8 +42,10 @@ import com.google.common.collect.Sets;
 public class GTScanRequest {
 
     private static final Logger logger = LoggerFactory.getLogger(GTScanRequest.class);
+    
     //it's not necessary to increase the checkInterval to a very large value because the check cost is not high
-    public static final int terminateCheckInterval = 1000;
+    //changing it might break org.apache.kylin.query.ITKylinQueryTest.testTimeoutQuery()
+    public static final int terminateCheckInterval = 100;
 
     private GTInfo info;
     private List<GTScanRange> ranges;
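
The hunk above drops the termination-check interval from every 1000 rows to every 100 rows, so the timeout integration test can observe a self-terminated scan quickly. Purely as illustration (the class and the exception below are hypothetical; only the check-every-N-rows idea comes from the diff), the pattern looks like this:

import java.util.Iterator;

public class TerminatingScan {
    // cheap check, so a small interval (100 rows) is affordable
    static final int TERMINATE_CHECK_INTERVAL = 100;

    static long scan(Iterator<?> rows, long deadlineMillis) {
        long scanned = 0;
        while (rows.hasNext()) {
            rows.next();
            scanned++;
            // look at the clock only every N rows to keep the common path cheap
            if (scanned % TERMINATE_CHECK_INTERVAL == 0 && System.currentTimeMillis() > deadlineMillis) {
                throw new RuntimeException("scan passed its deadline after " + scanned + " rows");
            }
        }
        return scanned;
    }
}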

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
index fda05ce..040cdc5 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
@@ -57,4 +57,5 @@ public interface IRealization extends IStorageAware {
 
     public long getDateRangeEnd();
 
+    public boolean supportsLimitPushDown();
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
index acb4960..cc39918 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
@@ -20,7 +20,11 @@ package org.apache.kylin.storage;
 
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.cuboid.Cuboid;
+import org.apache.kylin.metadata.realization.IRealization;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Range;
 
@@ -28,6 +32,7 @@ import com.google.common.collect.Range;
  * @author xjiang
  */
 public class StorageContext {
+    private static final Logger logger = LoggerFactory.getLogger(StorageContext.class);
 
     public static final int DEFAULT_THRESHOLD = 1000000;
 
@@ -35,6 +40,7 @@ public class StorageContext {
     private int threshold;
     private int limit;
     private int offset;
+    private int finalPushDownLimit;
     private boolean hasSort;
     private boolean acceptPartialResult;
 
@@ -62,6 +68,7 @@ public class StorageContext {
 
         this.acceptPartialResult = false;
         this.partialResultReturned = false;
+        this.finalPushDownLimit = Integer.MAX_VALUE;
     }
 
     public String getConnUrl() {
@@ -104,10 +111,33 @@ public class StorageContext {
         return this.enableLimit;
     }
 
-    public int getStoragePushDownLimit() {
+    private int getStoragePushDownLimit() {
         return this.isLimitEnabled() ? this.getOffset() + this.getLimit() : Integer.MAX_VALUE;
     }
-    
+
+    public int getFinalPushDownLimit() {
+        return finalPushDownLimit;
+    }
+
+    public void setFinalPushDownLimit(IRealization realization) {
+
+        //decide the final limit push down
+        int tempPushDownLimit = this.getStoragePushDownLimit();
+        if (tempPushDownLimit == Integer.MAX_VALUE) {
+            return;
+        }
+        
+        int pushDownLimitMax = KylinConfig.getInstanceFromEnv().getStoragePushDownLimitMax();
+        if (!realization.supportsLimitPushDown()) {
+            logger.info("Not enabling limit push down because cube storage type not supported");
+        } else if (tempPushDownLimit > pushDownLimitMax) {
+            logger.info("Not enabling limit push down because the limit(including offset) {} is larger than kylin.query.pushdown.limit.max {}", //
+                    tempPushDownLimit, pushDownLimitMax);
+        } else {
+            this.finalPushDownLimit = tempPushDownLimit;
+        }
+    }
+
     public void markSort() {
         this.hasSort = true;
     }
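
The new setFinalPushDownLimit() gathers the whole decision in one place: nothing is pushed down when the query carries no limit at all, when the realization's storage cannot honor a pushed-down limit, or when limit plus offset exceeds kylin.query.pushdown.limit.max; otherwise limit plus offset becomes the final push-down limit. A standalone sketch of that decision (method and parameter names are mine, not Kylin's):

public class PushDownLimitDecision {

    static int decideFinalLimit(int limit, int offset, boolean limitEnabled,
            boolean storageSupportsPushDown, int pushDownLimitMax) {
        int requested = limitEnabled ? limit + offset : Integer.MAX_VALUE;
        if (requested == Integer.MAX_VALUE) {
            return Integer.MAX_VALUE;          // no limit in the query at all
        }
        if (!storageSupportsPushDown) {
            return Integer.MAX_VALUE;          // storage type cannot honor it
        }
        if (requested > pushDownLimitMax) {
            return Integer.MAX_VALUE;          // too large to be worth pushing down
        }
        return requested;                      // push limit + offset down to storage
    }

    public static void main(String[] args) {
        System.out.println(decideFinalLimit(20, 0, true, true, 10000));   // 20
        System.out.println(decideFinalLimit(20, 0, true, false, 10000));  // 2147483647, i.e. no push down
    }
}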

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
index 9f505f3..b011f40 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
@@ -152,9 +152,9 @@ public class CubeScanRangePlanner extends ScanRangePlannerBase {
                     setAllowStorageAggregation(context.isNeedStorageAggregation()).setAggCacheMemThreshold(cubeSegment.getCubeInstance().getConfig().getQueryCoprocessorMemGB()).//
                     setStorageScanRowNumThreshold(context.getThreshold());
 
-            if (cubeDesc.supportsLimitPushDown()) {
-                builder.setStoragePushDownLimit(context.getStoragePushDownLimit());
-            }
+            if (context.getFinalPushDownLimit() != Integer.MAX_VALUE)
+                builder.setStoragePushDownLimit(context.getFinalPushDownLimit());
+
             scanRequest = builder.createGTScanRequest();
         } else {
             scanRequest = null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
index f0c2494..31663d0 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
@@ -113,9 +113,13 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         // replace derived columns in filter with host columns; columns on loosened condition must be added to group by
         TupleFilter filterD = translateDerived(filter, groupsD);
 
+        //set whether to aggregate at storage
         context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));
+        // set limit push down
         enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, sqlDigest.aggregations, context);
-        setThresholdIfNecessary(dimensionsD, metrics, context); // set cautious threshold to prevent out of memory
+        context.setFinalPushDownLimit(cubeInstance);
+        // set cautious threshold to prevent out of memory
+        setThresholdIfNecessary(dimensionsD, metrics, context);
 
         List<CubeSegmentScanner> scanners = Lists.newArrayList();
         for (CubeSegment cubeSeg : cubeInstance.getSegments(SegmentStatusEnum.READY)) {
@@ -135,7 +139,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         if (scanners.isEmpty())
             return ITupleIterator.EMPTY_TUPLE_ITERATOR;
 
-        return new SequentialCubeTupleIterator(scanners, cuboid, dimensionsD, metrics, returnTupleInfo, context, cubeDesc.supportsLimitPushDown());
+        return new SequentialCubeTupleIterator(scanners, cuboid, dimensionsD, metrics, returnTupleInfo, context);
     }
 
     protected boolean skipZeroInputSegment(CubeSegment cubeSegment) {
@@ -398,7 +402,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
     private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Collection<FunctionDesc> functionDescs, StorageContext context) {
         boolean possible = true;
 
-        boolean goodFilter = filter == null || (TupleFilter.isEvaluableRecursively(filter) && context.isCoprocessorEnabled());
+        boolean goodFilter = filter == null || TupleFilter.isEvaluableRecursively(filter);
         if (!goodFilter) {
             possible = false;
             logger.info("Storage limit push down is impossible because the filter is unevaluatable");

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
index 7059473..bef0e88 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SequentialCubeTupleIterator.java
@@ -25,7 +25,6 @@ import java.util.Set;
 
 import javax.annotation.Nullable;
 
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.TblColRef;
@@ -54,7 +53,7 @@ public class SequentialCubeTupleIterator implements ITupleIterator {
     private int scanCountDelta;
 
     public SequentialCubeTupleIterator(List<CubeSegmentScanner> scanners, Cuboid cuboid, Set<TblColRef> selectedDimensions, //
-            Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context, boolean supportLimitPushDown) {
+            Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context) {
         this.context = context;
         this.scanners = scanners;
 
@@ -63,8 +62,8 @@ public class SequentialCubeTupleIterator implements ITupleIterator {
             segmentCubeTupleIterators.add(new SegmentCubeTupleIterator(scanner, cuboid, selectedDimensions, selectedMetrics, returnTupleInfo, context));
         }
 
-        this.storagePushDownLimit = context.getStoragePushDownLimit();
-        if (!supportLimitPushDown || storagePushDownLimit > KylinConfig.getInstanceFromEnv().getStoragePushDownLimitMax()) {
+        this.storagePushDownLimit = context.getFinalPushDownLimit();
+        if (storagePushDownLimit == Integer.MAX_VALUE) {
             //normal case
             tupleIterator = Iterators.concat(segmentCubeTupleIterators.iterator());
         } else {
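
The iterator now reads the final push-down limit from the context: Integer.MAX_VALUE means plain concatenation of the per-segment iterators, anything else switches to a sorted merge that can stop early. A rough equivalent using Guava, which is already on the classpath (names are illustrative; Kylin's SortedIteratorMergerWithLimit also keeps per-shard state that this sketch omits):

import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

import com.google.common.collect.Iterators;

public class SegmentIteratorChoice {

    static <T> Iterator<T> combine(List<Iterator<T>> perSegment, int finalPushDownLimit,
            Comparator<T> primaryKeyOrder) {
        if (finalPushDownLimit == Integer.MAX_VALUE) {
            // normal case: no effective limit, just chain the segment iterators
            return Iterators.concat(perSegment.iterator());
        }
        // limit case: each segment iterator is already sorted, so merge and cut off early
        return Iterators.limit(Iterators.mergeSorted(perSegment, primaryKeyOrder), finalPushDownLimit);
    }
}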

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
new file mode 100644
index 0000000..fe1afd3
--- /dev/null
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.storage.gtrecord;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+import javax.annotation.Nullable;
+
+import org.apache.kylin.common.util.ImmutableBitSet;
+import org.apache.kylin.gridtable.GTInfo;
+import org.apache.kylin.gridtable.GTRecord;
+import org.apache.kylin.gridtable.IGTScanner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+
+/**
+ * scatter the blob returned from a region server into an iterable of GTRecords
+ */
+public class StorageResponseGTScatter implements IGTScanner {
+
+    private static final Logger logger = LoggerFactory.getLogger(StorageResponseGTScatter.class);
+
+    private GTInfo info;
+    private Iterator<byte[]> blocks;
+    private ImmutableBitSet columns;
+    private long totalScannedCount;
+    private int storagePushDownLimit = -1;
+
+    public StorageResponseGTScatter(GTInfo info, Iterator<byte[]> blocks, ImmutableBitSet columns, long totalScannedCount, int storagePushDownLimit) {
+        this.info = info;
+        this.blocks = blocks;
+        this.columns = columns;
+        this.totalScannedCount = totalScannedCount;
+        this.storagePushDownLimit = storagePushDownLimit;
+    }
+
+    @Override
+    public GTInfo getInfo() {
+        return info;
+    }
+
+    @Override
+    public long getScannedRowCount() {
+        return totalScannedCount;
+    }
+
+    @Override
+    public void close() throws IOException {
+        //do nothing
+    }
+
+    @Override
+    public Iterator<GTRecord> iterator() {
+        Iterator<Iterator<GTRecord>> shardSubsets = Iterators.transform(blocks, new EndpointResponseGTScatterFunc());
+        if (storagePushDownLimit != Integer.MAX_VALUE) {
+            return new SortedIteratorMergerWithLimit<GTRecord>(shardSubsets, storagePushDownLimit, GTRecord.getPrimaryKeyComparator()).getIterator();
+        } else {
+            return Iterators.concat(shardSubsets);
+        }
+    }
+
+    class EndpointResponseGTScatterFunc implements Function<byte[], Iterator<GTRecord>> {
+        @Nullable
+        @Override
+        public Iterator<GTRecord> apply(@Nullable final byte[] input) {
+
+            return new Iterator<GTRecord>() {
+                private ByteBuffer inputBuffer = null;
+                //a single reusable buffer GTRecord; SortedIteratorMergerWithLimit may peek one more GTRecord ahead
+                private GTRecord firstRecord = null;
+
+                @Override
+                public boolean hasNext() {
+                    if (inputBuffer == null) {
+                        inputBuffer = ByteBuffer.wrap(input);
+                        firstRecord = new GTRecord(info);
+                    }
+
+                    return inputBuffer.position() < inputBuffer.limit();
+                }
+
+                @Override
+                public GTRecord next() {
+                    firstRecord.loadColumns(columns, inputBuffer);
+                    return firstRecord;
+                }
+
+                @Override
+                public void remove() {
+                    throw new UnsupportedOperationException();
+                }
+            };
+        }
+    }
+
+}
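
Each region server answers with one byte blob per shard; StorageResponseGTScatter turns every blob into an iterator of GTRecords and then either merges them under the push-down limit or simply concatenates them. A self-contained sketch of the blob-to-iterator step, using fixed-width ints in place of GTRecords (everything below is illustrative):

import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.NoSuchElementException;

public class BlobScatterSketch {

    static Iterator<Integer> scatter(final byte[] blob) {
        return new Iterator<Integer>() {
            private final ByteBuffer buffer = ByteBuffer.wrap(blob);

            @Override
            public boolean hasNext() {
                return buffer.remaining() >= Integer.BYTES;
            }

            @Override
            public Integer next() {
                if (!hasNext())
                    throw new NoSuchElementException();
                return buffer.getInt();   // one fixed-width "record" per call
            }
        };
    }
}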

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
index a0262e3..9b3a0fc 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/hybrid/HybridInstance.java
@@ -257,6 +257,11 @@ public class HybridInstance extends RootPersistentEntity implements IRealization
     }
 
     @Override
+    public boolean supportsLimitPushDown() {
+        return false;
+    }
+
+    @Override
     public List<TblColRef> getAllDimensions() {
         init();
         return allDimensions;

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
index cbd4e44..f4667af 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
@@ -85,10 +85,10 @@ public class ITCombinationTest extends ITKylinQueryTest {
             // unset
         }
 
-        RemoveBlackoutRealizationsRule.blackouts.clear();
+        RemoveBlackoutRealizationsRule.blackList.clear();
         if (excludeViewCubes) {
-            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_with_view_left_join_empty]");
-            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_with_view_inner_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_with_view_left_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_with_view_inner_join_empty]");
         }
 
         if ("v1".equalsIgnoreCase(queryEngine))

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index c1c9767..b9895e8 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -21,31 +21,22 @@ package org.apache.kylin.query;
 import static org.junit.Assert.assertTrue;
 
 import java.io.File;
-import java.sql.DriverManager;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
-import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
+import org.apache.kylin.gridtable.GTScanTimeoutException;
 import org.apache.kylin.gridtable.StorageSideBehavior;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationType;
-import org.apache.kylin.query.enumerator.OLAPQuery;
-import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.routing.Candidate;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
-import org.apache.kylin.query.schema.OLAPSchemaFactory;
 import org.apache.kylin.storage.hbase.HBaseStorage;
-import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
-import org.hamcrest.BaseMatcher;
-import org.hamcrest.Description;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -73,8 +64,8 @@ public class ITKylinQueryTest extends KylinTestBase {
 
         setupAll();
 
-        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_with_view_left_join_empty]");
-        RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_with_view_inner_join_empty]");
+        RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_with_view_left_join_empty]");
+        RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_with_view_inner_join_empty]");
     }
 
     @AfterClass
@@ -84,105 +75,69 @@ public class ITKylinQueryTest extends KylinTestBase {
         clean();
     }
 
-    protected static void setupAll() throws Exception {
-        //setup env
-        HBaseMetadataTestCase.staticCreateTestMetadata();
-        config = KylinConfig.getInstanceFromEnv();
-
-        //setup cube conn
-        File olapTmp = OLAPSchemaFactory.createTempOLAPJson(ProjectInstance.DEFAULT_PROJECT_NAME, config);
-        Properties props = new Properties();
-        props.setProperty(OLAPQuery.PROP_SCAN_THRESHOLD, "10001");
-        cubeConnection = DriverManager.getConnection("jdbc:calcite:model=" + olapTmp.getAbsolutePath(), props);
-
-        //setup h2
-        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++), "sa", "");
-        // Load H2 Tables (inner join)
-        H2Database h2DB = new H2Database(h2Connection, config);
-        h2DB.loadAllTables();
-
-    }
-
-    protected static void clean() {
-        if (cubeConnection != null)
-            closeConnection(cubeConnection);
-        if (h2Connection != null)
-            closeConnection(h2Connection);
-
-        ObserverEnabler.forceCoprocessorUnset();
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-        RemoveBlackoutRealizationsRule.blackouts.clear();
-
-    }
-
     protected String getQueryFolderPrefix() {
         return "";
     }
 
+    protected Throwable findRoot(Throwable throwable) {
+        while (true) {
+            if (throwable.getCause() != null) {
+                throwable = throwable.getCause();
+            } else {
+                break;
+            }
+        }
+        return throwable;
+    }
+
     @Test
     public void testTimeoutQuery() throws Exception {
         if (HBaseStorage.overwriteStorageQuery != null) {
             //the v1 query engine is not applicable here
             return;
         }
-
-        thrown.expect(SQLException.class);
-
-        //should not break at table duplicate check, should fail at model duplicate check
-        thrown.expect(new BaseMatcher<Throwable>() {
-            @Override
-            public boolean matches(Object item) {
-
-                //find the "root"
-                Throwable throwable = (Throwable) item;
-                while (true) {
-                    if (throwable.getCause() != null) {
-                        throwable = throwable.getCause();
-                    } else {
-                        break;
-                    }
-                }
-
-                if (throwable instanceof GTScanSelfTerminatedException) {
-                    return true;
-                }
-                return false;
-            }
-
-            @Override
-            public void describeTo(Description description) {
-            }
-        });
-
-        runTimetoutQueries();
-
-    }
-
-    protected void runTimetoutQueries() throws Exception {
         try {
 
             Map<String, String> toggles = Maps.newHashMap();
             toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
             BackdoorToggles.setToggles(toggles);
 
-            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.03");//set timeout to 9s
+            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.01");//set timeout to 3s
 
             //these two cubes have RAW measures, which would disturb limit push down
-            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
-            RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
-            execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout", null, true);
+            runTimeoutQueries();
         } finally {
 
             //these two cubes have RAW measures, which would disturb limit push down
-            RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
-            RemoveBlackoutRealizationsRule.blackouts.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
+            RemoveBlackoutRealizationsRule.blackList.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
             KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "1");//set timeout to 9s 
             BackdoorToggles.cleanToggles();
         }
     }
 
+    protected void runTimeoutQueries() throws Exception {
+        List<File> sqlFiles = getFilesFromFolder(new File(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout"), ".sql");
+        for (File sqlFile : sqlFiles) {
+            try {
+                runSQL(sqlFile, false, false);
+            } catch (SQLException e) {
+
+                System.out.println(e.getMessage());
+
+                if (findRoot(e) instanceof GTScanSelfTerminatedException) {
+                    //expected
+                    continue;
+                }
+            }
+            throw new RuntimeException("Expecting GTScanTimeoutException");
+        }
+    }
+
     //don't try to ignore this test, try to clean your "temp" folder
     @Test
     public void testTempQuery() throws Exception {
@@ -346,11 +301,25 @@ public class ITKylinQueryTest extends KylinTestBase {
         execAndCompDynamicQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_dynamic", null, true);
     }
 
-    @Ignore("simple query will be supported by ii")
     @Test
     public void testLimitEnabled() throws Exception {
-        runSqlFile(getQueryFolderPrefix() + "src/test/resources/query/sql_optimize/enable-limit01.sql");
-        assertLimitWasEnabled();
+        if (HBaseStorage.overwriteStorageQuery == null) {//v1 query engine will not work
+
+            try {
+                //other cubes have strange aggregation groups
+                RemoveBlackoutRealizationsRule.whiteList.add("CUBE[name=test_kylin_cube_with_slr_empty]");
+
+                List<File> sqlFiles = getFilesFromFolder(new File(getQueryFolderPrefix() + "src/test/resources/query/sql_limit"), ".sql");
+                for (File sqlFile : sqlFiles) {
+                    runSQL(sqlFile, false, false);
+                    assertTrue(checkLimitEnabled());
+                    assertTrue(checkFinalPushDownLimit());
+                }
+
+            } finally {
+                RemoveBlackoutRealizationsRule.whiteList.remove("CUBE[name=test_kylin_cube_with_slr_empty]");
+            }
+        }
     }
 
     @Test
@@ -377,13 +346,4 @@ public class ITKylinQueryTest extends KylinTestBase {
         this.batchExecuteQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_window");
     }
 
-    private void assertLimitWasEnabled() {
-        OLAPContext context = getFirstOLAPContext();
-        assertTrue(context.storageContext.isLimitEnabled());
-    }
-
-    private OLAPContext getFirstOLAPContext() {
-        return OLAPContext.getThreadLocalContexts().iterator().next();
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index 2ad1105..294750e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -26,6 +26,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
 import java.sql.Connection;
+import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -38,12 +39,20 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Properties;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.logging.LogManager;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.query.enumerator.OLAPQuery;
+import org.apache.kylin.query.relnode.OLAPContext;
+import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
+import org.apache.kylin.query.schema.OLAPSchemaFactory;
+import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.Assertion;
 import org.dbunit.database.DatabaseConfig;
 import org.dbunit.database.DatabaseConnection;
@@ -568,4 +577,51 @@ public class KylinTestBase {
         printInfo(sb.toString());
         return count;
     }
+
+    protected static void setupAll() throws Exception {
+        //setup env
+        HBaseMetadataTestCase.staticCreateTestMetadata();
+        config = KylinConfig.getInstanceFromEnv();
+
+        //setup cube conn
+        File olapTmp = OLAPSchemaFactory.createTempOLAPJson(ProjectInstance.DEFAULT_PROJECT_NAME, config);
+        Properties props = new Properties();
+        props.setProperty(OLAPQuery.PROP_SCAN_THRESHOLD, "10001");
+        cubeConnection = DriverManager.getConnection("jdbc:calcite:model=" + olapTmp.getAbsolutePath(), props);
+
+        //setup h2
+        h2Connection = DriverManager.getConnection("jdbc:h2:mem:db" + (h2InstanceCount++), "sa", "");
+        // Load H2 Tables (inner join)
+        H2Database h2DB = new H2Database(h2Connection, config);
+        h2DB.loadAllTables();
+
+    }
+
+    protected static void clean() {
+        if (cubeConnection != null)
+            closeConnection(cubeConnection);
+        if (h2Connection != null)
+            closeConnection(h2Connection);
+
+        ObserverEnabler.forceCoprocessorUnset();
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+        RemoveBlackoutRealizationsRule.blackList.clear();
+
+    }
+
+    protected boolean checkLimitEnabled() {
+        OLAPContext context = getFirstOLAPContext();
+        return (context.storageContext.isLimitEnabled());
+    }
+
+    protected boolean checkFinalPushDownLimit() {
+        OLAPContext context = getFirstOLAPContext();
+        return (context.storageContext.getFinalPushDownLimit() != Integer.MAX_VALUE);
+
+    }
+
+    private OLAPContext getFirstOLAPContext() {
+        return OLAPContext.getThreadLocalContexts().iterator().next();
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/resources/query/sql/query45.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql/query45.sql b/kylin-it/src/test/resources/query/sql/query45.sql
new file mode 100644
index 0000000..0c78657
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql/query45.sql
@@ -0,0 +1,23 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+
+
+select seller_id, sum(price) from test_kylin_fact
+  where lstg_format_name='FP-GTC' 
+  group by seller_id limit 20

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/resources/query/sql_limit/query01.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_limit/query01.sql b/kylin-it/src/test/resources/query/sql_limit/query01.sql
new file mode 100644
index 0000000..fca8175
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_limit/query01.sql
@@ -0,0 +1,21 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select * from test_kylin_fact
+  where lstg_format_name='FP-GTC' 
+ limit 20

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/resources/query/sql_limit/query02.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_limit/query02.sql b/kylin-it/src/test/resources/query/sql_limit/query02.sql
new file mode 100644
index 0000000..53f7bd7
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_limit/query02.sql
@@ -0,0 +1,24 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+
+
+select seller_id, sum(price) from test_kylin_fact
+  where lstg_format_name='FP-GTC' 
+  group by seller_id limit 20
+ 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/resources/query/sql_optimize/enable-limit01.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_optimize/enable-limit01.sql b/kylin-it/src/test/resources/query/sql_optimize/enable-limit01.sql
deleted file mode 100644
index 4a62d92..0000000
--- a/kylin-it/src/test/resources/query/sql_optimize/enable-limit01.sql
+++ /dev/null
@@ -1,19 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---     http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing, software
--- distributed under the License is distributed on an "AS IS" BASIS,
--- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
--- See the License for the specific language governing permissions and
--- limitations under the License.
---
-
-select * from test_kylin_fact limit 10

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/kylin-it/src/test/resources/query/sql_timeout/query02.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_timeout/query02.sql b/kylin-it/src/test/resources/query/sql_timeout/query02.sql
new file mode 100644
index 0000000..2f187a4
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_timeout/query02.sql
@@ -0,0 +1,19 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select seller_id,lstg_format_name,sum(price) from test_kylin_fact group by seller_id,lstg_format_name
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java b/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
index 9c3d7c9..f299d17 100644
--- a/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
+++ b/query/src/main/java/org/apache/kylin/query/routing/rules/RemoveBlackoutRealizationsRule.java
@@ -31,15 +31,22 @@ import com.google.common.collect.Sets;
  * for IT use, exclude some cubes 
  */
 public class RemoveBlackoutRealizationsRule extends RoutingRule {
-    public static Set<String> blackouts = Sets.newHashSet();
+    public static Set<String> blackList = Sets.newHashSet();
+    public static Set<String> whiteList = Sets.newHashSet();
 
     @Override
     public void apply(List<Candidate> candidates) {
         for (Iterator<Candidate> iterator = candidates.iterator(); iterator.hasNext();) {
             Candidate candidate = iterator.next();
 
-            if (blackouts.contains(candidate.getRealization().getCanonicalName())) {
+            if (blackList.contains(candidate.getRealization().getCanonicalName())) {
                 iterator.remove();
+                continue;
+            }
+
+            if (!whiteList.isEmpty() && !whiteList.contains(candidate.getRealization().getCanonicalName())) {
+                iterator.remove();
+                continue;
             }
         }
     }
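
The rule now keeps two sets: a candidate is dropped when it is on the black list, or when the white list is non-empty and does not contain it. The same filtering on plain strings (a sketch, not Kylin code):

import java.util.Iterator;
import java.util.List;
import java.util.Set;

public class ListFilterSketch {

    static void apply(List<String> candidates, Set<String> blackList, Set<String> whiteList) {
        for (Iterator<String> it = candidates.iterator(); it.hasNext();) {
            String name = it.next();
            if (blackList.contains(name)) {
                it.remove();               // explicitly excluded
                continue;
            }
            if (!whiteList.isEmpty() && !whiteList.contains(name)) {
                it.remove();               // a non-empty white list excludes everything else
            }
        }
    }
}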

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 573951b..c7de287 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -45,6 +45,7 @@ import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
 import org.apache.kylin.gridtable.IGTScanner;
+import org.apache.kylin.storage.gtrecord.StorageResponseGTScatter;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
@@ -222,7 +223,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
             });
         }
 
-        return new GTBlobScatter(fullGTInfo, epResultItr, scanRequest.getColumns(), totalScannedCount.get(), scanRequest.getStoragePushDownLimit());
+        return new StorageResponseGTScatter(fullGTInfo, epResultItr, scanRequest.getColumns(), totalScannedCount.get(), scanRequest.getStoragePushDownLimit());
     }
 
     private ByteString serializeGTScanReq(GTScanRequest scanRequest) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index f4729a3..c27e5fc 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -85,7 +85,7 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
             byte[] ret = null;
 
             while (ret == null && coprocException == null && timeoutTS > System.currentTimeMillis()) {
-                ret = queue.poll(5000, TimeUnit.MILLISECONDS);
+                ret = queue.poll(10000, TimeUnit.MILLISECONDS);
             }
 
             if (coprocException != null) {
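
Only the poll interval changes here (5s to 10s); the surrounding loop keeps polling until a blob arrives, a coprocessor exception is recorded, or the overall deadline passes. The shape of that deadline-bounded polling, sketched against a plain BlockingQueue (names are mine):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

public class DeadlinePoll {

    static byte[] pollUntil(BlockingQueue<byte[]> queue, long deadlineMillis) throws InterruptedException {
        byte[] ret = null;
        while (ret == null && System.currentTimeMillis() < deadlineMillis) {
            // wake up at least every 10s so a passed deadline is noticed promptly
            ret = queue.poll(10000, TimeUnit.MILLISECONDS);
        }
        if (ret == null) {
            throw new RuntimeException("coprocessor round trip timed out");
        }
        return ret;
    }
}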

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/GTBlobScatter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/GTBlobScatter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/GTBlobScatter.java
deleted file mode 100644
index 631510e..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/GTBlobScatter.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *  
- *     http://www.apache.org/licenses/LICENSE-2.0
- *  
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.storage.hbase.cube.v2;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import javax.annotation.Nullable;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.ImmutableBitSet;
-import org.apache.kylin.gridtable.GTInfo;
-import org.apache.kylin.gridtable.GTRecord;
-import org.apache.kylin.gridtable.IGTScanner;
-import org.apache.kylin.storage.gtrecord.SortedIteratorMergerWithLimit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterators;
-
-/**
- * scatter the blob returned from region server to a iterable of gtrecords
- */
-class GTBlobScatter implements IGTScanner {
-
-    private static final Logger logger = LoggerFactory.getLogger(GTBlobScatter.class);
-
-    private GTInfo info;
-    private Iterator<byte[]> blocks;
-    private ImmutableBitSet columns;
-    private long totalScannedCount;
-    private int storagePushDownLimit = -1;
-
-    public GTBlobScatter(GTInfo info, Iterator<byte[]> blocks, ImmutableBitSet columns, long totalScannedCount, int storagePushDownLimit) {
-        this.info = info;
-        this.blocks = blocks;
-        this.columns = columns;
-        this.totalScannedCount = totalScannedCount;
-        this.storagePushDownLimit = storagePushDownLimit;
-    }
-
-    @Override
-    public GTInfo getInfo() {
-        return info;
-    }
-
-    @Override
-    public long getScannedRowCount() {
-        return totalScannedCount;
-    }
-
-    @Override
-    public void close() throws IOException {
-        //do nothing
-    }
-
-    @Override
-    public Iterator<GTRecord> iterator() {
-        Iterator<Iterator<GTRecord>> shardSubsets = Iterators.transform(blocks, new GTBlobScatterFunc());
-        if (storagePushDownLimit <= KylinConfig.getInstanceFromEnv().getStoragePushDownLimitMax()) {
-            return new SortedIteratorMergerWithLimit<GTRecord>(shardSubsets, storagePushDownLimit, GTRecord.getPrimaryKeyComparator()).getIterator();
-        } else {
-            return Iterators.concat(shardSubsets);
-        }
-    }
-
-    class GTBlobScatterFunc implements Function<byte[], Iterator<GTRecord>> {
-        @Nullable
-        @Override
-        public Iterator<GTRecord> apply(@Nullable final byte[] input) {
-
-            return new Iterator<GTRecord>() {
-                private ByteBuffer inputBuffer = null;
-                //rotate between two buffer GTRecord to support SortedIteratorMergerWithLimit, which will peek one more GTRecord
-                private GTRecord firstRecord = null;
-                private GTRecord secondRecord = null;
-                private GTRecord thirdRecord = null;
-                private GTRecord fourthRecord = null;
-                private int counter = 0;
-
-                @Override
-                public boolean hasNext() {
-                    if (inputBuffer == null) {
-                        inputBuffer = ByteBuffer.wrap(input);
-                        firstRecord = new GTRecord(info);
-                        secondRecord = new GTRecord(info);
-                        thirdRecord = new GTRecord(info);
-                        fourthRecord = new GTRecord(info);
-                    }
-
-                    return inputBuffer.position() < inputBuffer.limit();
-                }
-
-                @Override
-                public GTRecord next() {
-                    firstRecord.loadColumns(columns, inputBuffer);
-                    //logger.info("A GTRecord: " + System.identityHashCode(this) + " " + firstRecord + " " + System.identityHashCode(firstRecord));
-                    return firstRecord;
-                    //                    GTRecord temp = new GTRecord(info);
-                    //                    temp.loadColumns(columns, inputBuffer);
-                    //                    return temp;
-
-                    //                    counter++;
-                    //                    int index = counter % 4;
-                    //                    if (index == 1) {
-                    //                        firstRecord.loadColumns(columns, inputBuffer);
-                    //                        //logger.info("A GTRecord: " + System.identityHashCode(this) + " " + firstRecord + " " + System.identityHashCode(firstRecord));
-                    //                        return firstRecord;
-                    //                    } else if (index == 2) {
-                    //                        secondRecord.loadColumns(columns, inputBuffer);
-                    //                        //logger.info("B GTRecord: " + System.identityHashCode(this) + " " + secondRecord + " " + System.identityHashCode(secondRecord));
-                    //                        return secondRecord;
-                    //                    } else if (index == 3) {
-                    //                        thirdRecord.loadColumns(columns, inputBuffer);
-                    //                        //logger.info("C GTRecord: " + System.identityHashCode(this) + " " + thirdRecord + " " + System.identityHashCode(thirdRecord));
-                    //                        return thirdRecord;
-                    //                    } else {
-                    //                        fourthRecord.loadColumns(columns, inputBuffer);
-                    //                        //logger.info("D GTRecord: " + System.identityHashCode(this) + " " + fourthRecord + " " + System.identityHashCode(fourthRecord));
-                    //                        return fourthRecord;
-                    //                    }
-                }
-
-                @Override
-                public void remove() {
-                    throw new UnsupportedOperationException();
-                }
-            };
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e8ed97d/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index ffe41c5..13a7b53 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -246,6 +246,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
             final MutableBoolean scanNormalComplete = new MutableBoolean(true);
             final long deadline = scanReq.getTimeout() + this.serviceStartTime;
+            logger.info("deadline is " + deadline);
             final long storagePushDownLimit = scanReq.getStoragePushDownLimit();
 
             final CellListIterator cellListIterator = new CellListIterator() {


[26/50] [abbrv] kylin git commit: minor, better log on dictionary creation failure

Posted by sh...@apache.org.
minor, better log on dictionary creation failure


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6db4b172
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6db4b172
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6db4b172

Branch: refs/heads/KYLIN-1726
Commit: 6db4b1723c47762280128ec7c08a160fdc9a69d7
Parents: aef7869
Author: Li Yang <li...@apache.org>
Authored: Mon Sep 12 15:28:58 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Mon Sep 12 15:28:58 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |   2 +-
 .../apache/kylin/dict/DictionaryGenerator.java  | 378 +++++++++----------
 .../apache/kylin/dict/DictionaryManager.java    |   2 +
 3 files changed, 192 insertions(+), 190 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6db4b172/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index de9051c..79ee084 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -449,7 +449,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public int getDictionaryMaxCardinality() {
-        return Integer.parseInt(getOptional("kylin.dictionary.max.cardinality", "5000000"));
+        return Integer.parseInt(getOptional("kylin.dictionary.max.cardinality", "30000000"));
     }
 
     public int getTableSnapshotMaxMB() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/6db4b172/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
index 5bd3357..ba848c6 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryGenerator.java
@@ -1,189 +1,189 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.dict;
-
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.common.util.Dictionary;
-import org.apache.kylin.metadata.datatype.DataType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Preconditions;
-
-/**
- * @author yangli9
- */
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class DictionaryGenerator {
-
-    private static final int DICT_MAX_CARDINALITY = getDictionaryMaxCardinality();
-
-    private static final Logger logger = LoggerFactory.getLogger(DictionaryGenerator.class);
-
-    private static final String[] DATE_PATTERNS = new String[] { "yyyy-MM-dd", "yyyyMMdd" };
-
-    private static int getDictionaryMaxCardinality() {
-        try {
-            return KylinConfig.getInstanceFromEnv().getDictionaryMaxCardinality();
-        } catch (Throwable e) {
-            return 5000000; // some test case does not have KylinConfig setup properly
-        }
-    }
-
-    public static Dictionary<String> buildDictionary(DataType dataType, IDictionaryValueEnumerator valueEnumerator) throws IOException {
-        Preconditions.checkNotNull(dataType, "dataType cannot be null");
-
-        // build dict, case by data type
-        IDictionaryBuilder builder;
-        if (dataType.isDateTimeFamily()) {
-            if (dataType.isDate())
-                builder = new DateDictBuilder();
-            else
-                builder = new TimeDictBuilder();
-        } else if (dataType.isNumberFamily()) {
-            builder = new NumberDictBuilder();
-        } else {
-            builder = new StringDictBuilder();
-        }
-
-        return buildDictionary(builder, null, valueEnumerator);
-    }
-
-    public static Dictionary<String> buildDictionary(IDictionaryBuilder builder, DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator) throws IOException {
-        int baseId = 0; // always 0 for now
-        int nSamples = 5;
-        ArrayList<String> samples = new ArrayList<String>(nSamples);
-
-        Dictionary<String> dict = builder.build(dictInfo, valueEnumerator, baseId, nSamples, samples);
-
-        // log a few samples
-        StringBuilder buf = new StringBuilder();
-        for (String s : samples) {
-            if (buf.length() > 0) {
-                buf.append(", ");
-            }
-            buf.append(s.toString()).append("=>").append(dict.getIdFromValue(s));
-        }
-        logger.debug("Dictionary value samples: " + buf.toString());
-        logger.debug("Dictionary cardinality: " + dict.getSize());
-        logger.debug("Dictionary builder class: " + builder.getClass().getName());
-        logger.debug("Dictionary class: " + dict.getClass().getName());
-        if (dict instanceof TrieDictionary && dict.getSize() > DICT_MAX_CARDINALITY) {
-            throw new IllegalArgumentException("Too high cardinality is not suitable for dictionary -- cardinality: " + dict.getSize());
-        }
-        return dict;
-    }
-
-    public static Dictionary mergeDictionaries(DataType dataType, List<DictionaryInfo> sourceDicts) throws IOException {
-        return buildDictionary(dataType, new MultipleDictionaryValueEnumerator(sourceDicts));
-    }
-
-    private static class DateDictBuilder implements IDictionaryBuilder {
-        @Override
-        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
-            final int BAD_THRESHOLD = 0;
-            String matchPattern = null;
-            byte[] value;
-
-            for (String ptn : DATE_PATTERNS) {
-                matchPattern = ptn; // be optimistic
-                int badCount = 0;
-                SimpleDateFormat sdf = new SimpleDateFormat(ptn);
-                while (valueEnumerator.moveNext()) {
-                    value = valueEnumerator.current();
-                    if (value == null || value.length == 0)
-                        continue;
-
-                    String str = Bytes.toString(value);
-                    try {
-                        sdf.parse(str);
-                        if (returnSamples.size() < nSamples && returnSamples.contains(str) == false)
-                            returnSamples.add(str);
-                    } catch (ParseException e) {
-                        logger.info("Unrecognized date value: " + str);
-                        badCount++;
-                        if (badCount > BAD_THRESHOLD) {
-                            matchPattern = null;
-                            break;
-                        }
-                    }
-                }
-                if (matchPattern != null) {
-                    return new DateStrDictionary(matchPattern, baseId);
-                }
-            }
-
-            throw new IllegalStateException("Unrecognized datetime value");
-        }
-    }
-
-    private static class TimeDictBuilder implements IDictionaryBuilder {
-        @Override
-        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
-            return new TimeStrDictionary(); // base ID is always 0
-        }
-    }
-
-    private static class StringDictBuilder implements IDictionaryBuilder {
-        @Override
-        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
-            TrieDictionaryBuilder builder = new TrieDictionaryBuilder(new StringBytesConverter());
-            byte[] value;
-            while (valueEnumerator.moveNext()) {
-                value = valueEnumerator.current();
-                if (value == null)
-                    continue;
-                String v = Bytes.toString(value);
-                builder.addValue(v);
-                if (returnSamples.size() < nSamples && returnSamples.contains(v) == false)
-                    returnSamples.add(v);
-            }
-            return builder.build(baseId);
-        }
-    }
-
-    private static class NumberDictBuilder implements IDictionaryBuilder {
-        @Override
-        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
-            NumberDictionaryBuilder builder = new NumberDictionaryBuilder(new StringBytesConverter());
-            byte[] value;
-            while (valueEnumerator.moveNext()) {
-                value = valueEnumerator.current();
-                if (value == null)
-                    continue;
-                String v = Bytes.toString(value);
-                if (StringUtils.isBlank(v)) // empty string is null for numbers
-                    continue;
-
-                builder.addValue(v);
-                if (returnSamples.size() < nSamples && returnSamples.contains(v) == false)
-                    returnSamples.add(v);
-            }
-            return builder.build(baseId);
-        }
-    }
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.dict;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.Dictionary;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * @author yangli9
+ */
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class DictionaryGenerator {
+
+    private static final int DICT_MAX_CARDINALITY = getDictionaryMaxCardinality();
+
+    private static final Logger logger = LoggerFactory.getLogger(DictionaryGenerator.class);
+
+    private static final String[] DATE_PATTERNS = new String[] { "yyyy-MM-dd", "yyyyMMdd" };
+
+    private static int getDictionaryMaxCardinality() {
+        try {
+            return KylinConfig.getInstanceFromEnv().getDictionaryMaxCardinality();
+        } catch (Throwable e) {
+            return 30000000; // some test case does not have KylinConfig setup properly
+        }
+    }
+
+    public static Dictionary<String> buildDictionary(DataType dataType, IDictionaryValueEnumerator valueEnumerator) throws IOException {
+        Preconditions.checkNotNull(dataType, "dataType cannot be null");
+
+        // build dict, case by data type
+        IDictionaryBuilder builder;
+        if (dataType.isDateTimeFamily()) {
+            if (dataType.isDate())
+                builder = new DateDictBuilder();
+            else
+                builder = new TimeDictBuilder();
+        } else if (dataType.isNumberFamily()) {
+            builder = new NumberDictBuilder();
+        } else {
+            builder = new StringDictBuilder();
+        }
+
+        return buildDictionary(builder, null, valueEnumerator);
+    }
+
+    public static Dictionary<String> buildDictionary(IDictionaryBuilder builder, DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator) throws IOException {
+        int baseId = 0; // always 0 for now
+        int nSamples = 5;
+        ArrayList<String> samples = new ArrayList<String>(nSamples);
+
+        Dictionary<String> dict = builder.build(dictInfo, valueEnumerator, baseId, nSamples, samples);
+
+        // log a few samples
+        StringBuilder buf = new StringBuilder();
+        for (String s : samples) {
+            if (buf.length() > 0) {
+                buf.append(", ");
+            }
+            buf.append(s.toString()).append("=>").append(dict.getIdFromValue(s));
+        }
+        logger.debug("Dictionary value samples: " + buf.toString());
+        logger.debug("Dictionary cardinality: " + dict.getSize());
+        logger.debug("Dictionary builder class: " + builder.getClass().getName());
+        logger.debug("Dictionary class: " + dict.getClass().getName());
+        if (dict instanceof TrieDictionary && dict.getSize() > DICT_MAX_CARDINALITY) {
+            throw new IllegalArgumentException("Too high cardinality is not suitable for dictionary -- cardinality: " + dict.getSize());
+        }
+        return dict;
+    }
+
+    public static Dictionary mergeDictionaries(DataType dataType, List<DictionaryInfo> sourceDicts) throws IOException {
+        return buildDictionary(dataType, new MultipleDictionaryValueEnumerator(sourceDicts));
+    }
+
+    private static class DateDictBuilder implements IDictionaryBuilder {
+        @Override
+        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
+            final int BAD_THRESHOLD = 0;
+            String matchPattern = null;
+            byte[] value;
+
+            for (String ptn : DATE_PATTERNS) {
+                matchPattern = ptn; // be optimistic
+                int badCount = 0;
+                SimpleDateFormat sdf = new SimpleDateFormat(ptn);
+                while (valueEnumerator.moveNext()) {
+                    value = valueEnumerator.current();
+                    if (value == null || value.length == 0)
+                        continue;
+
+                    String str = Bytes.toString(value);
+                    try {
+                        sdf.parse(str);
+                        if (returnSamples.size() < nSamples && returnSamples.contains(str) == false)
+                            returnSamples.add(str);
+                    } catch (ParseException e) {
+                        logger.info("Unrecognized date value: " + str);
+                        badCount++;
+                        if (badCount > BAD_THRESHOLD) {
+                            matchPattern = null;
+                            break;
+                        }
+                    }
+                }
+                if (matchPattern != null) {
+                    return new DateStrDictionary(matchPattern, baseId);
+                }
+            }
+
+            throw new IllegalStateException("Unrecognized datetime value");
+        }
+    }
+
+    private static class TimeDictBuilder implements IDictionaryBuilder {
+        @Override
+        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
+            return new TimeStrDictionary(); // base ID is always 0
+        }
+    }
+
+    private static class StringDictBuilder implements IDictionaryBuilder {
+        @Override
+        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
+            TrieDictionaryBuilder builder = new TrieDictionaryBuilder(new StringBytesConverter());
+            byte[] value;
+            while (valueEnumerator.moveNext()) {
+                value = valueEnumerator.current();
+                if (value == null)
+                    continue;
+                String v = Bytes.toString(value);
+                builder.addValue(v);
+                if (returnSamples.size() < nSamples && returnSamples.contains(v) == false)
+                    returnSamples.add(v);
+            }
+            return builder.build(baseId);
+        }
+    }
+
+    private static class NumberDictBuilder implements IDictionaryBuilder {
+        @Override
+        public Dictionary<String> build(DictionaryInfo dictInfo, IDictionaryValueEnumerator valueEnumerator, int baseId, int nSamples, ArrayList<String> returnSamples) throws IOException {
+            NumberDictionaryBuilder builder = new NumberDictionaryBuilder(new StringBytesConverter());
+            byte[] value;
+            while (valueEnumerator.moveNext()) {
+                value = valueEnumerator.current();
+                if (value == null)
+                    continue;
+                String v = Bytes.toString(value);
+                if (StringUtils.isBlank(v)) // empty string is null for numbers
+                    continue;
+
+                builder.addValue(v);
+                if (returnSamples.size() < nSamples && returnSamples.contains(v) == false)
+                    returnSamples.add(v);
+            }
+            return builder.build(baseId);
+        }
+    }
+}
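
The only visible functional change in the rewrite above is the fallback in getDictionaryMaxCardinality(), raised from 5000000 to 30000000 for environments where KylinConfig is not set up. A minimal standalone sketch of that fallback pattern, using an illustrative system property in place of KylinConfig (not Kylin code):

// Standalone sketch of "use the configured value, fall back to a hard cap when
// the config environment is unavailable (e.g. in unit tests)". The property
// name is illustrative only; the fallback value mirrors the new one in the diff.
public class MaxCardinalityFallbackSketch {

    static int getDictionaryMaxCardinality() {
        try {
            // in Kylin this reads KylinConfig; here a system property stands in
            return Integer.parseInt(System.getProperty("sketch.dict.max.cardinality"));
        } catch (Throwable e) {
            return 30000000; // some test cases do not have the config set up properly
        }
    }

    public static void main(String[] args) {
        System.out.println("dictionary max cardinality = " + getDictionaryMaxCardinality());
    }
}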

http://git-wip-us.apache.org/repos/asf/kylin/blob/6db4b172/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
index ff088b9..c8a7a54 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/DictionaryManager.java
@@ -321,6 +321,8 @@ public class DictionaryManager {
                 dictionary = DictionaryGenerator.buildDictionary(DataType.getType(dictInfo.getDataType()), columnValueEnumerator);
             else
                 dictionary = DictionaryGenerator.buildDictionary((IDictionaryBuilder) ClassUtil.newInstance(builderClass), dictInfo, columnValueEnumerator);
+        } catch (Exception ex) {
+            throw new RuntimeException("Failed to create dictionary on " + col, ex);
         } finally {
             if (columnValueEnumerator != null)
                 columnValueEnumerator.close();
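
The hunk above wraps dictionary building so that any failure is re-thrown with the offending column attached, while the value enumerator is still closed. A standalone sketch of that wrap-and-rethrow-with-context shape, with illustrative names rather than the Kylin classes:

// Standalone sketch: catch any exception, re-throw it with context, and still
// release the input in finally -- the same shape as the hunk above.
public class WrapRethrowSketch {

    static String buildDictionary(String col) {
        if (col.isEmpty())
            throw new IllegalArgumentException("no values for column");
        return "dict(" + col + ")";
    }

    public static void main(String[] args) {
        String col = args.length > 0 ? args[0] : "";
        AutoCloseable enumerator = () -> System.out.println("enumerator closed");
        try {
            System.out.println(buildDictionary(col));
        } catch (Exception ex) {
            // attach the failing column so the root cause is easy to find in logs
            throw new RuntimeException("Failed to create dictionary on " + col, ex);
        } finally {
            try {
                enumerator.close();
            } catch (Exception ignored) {
                // closing is best-effort in this sketch
            }
        }
    }
}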


[20/50] [abbrv] kylin git commit: minor, fix CI

Posted by sh...@apache.org.
minor, fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5e95abde
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5e95abde
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5e95abde

Branch: refs/heads/KYLIN-1726
Commit: 5e95abdec6da71a609cea80fa6569328b0c1e6cf
Parents: 3450c0f
Author: Hongbin Ma <ma...@apache.org>
Authored: Sat Sep 10 19:26:29 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Sat Sep 10 19:26:42 2016 +0800

----------------------------------------------------------------------
 .../src/test/java/org/apache/kylin/query/ITKylinQueryTest.java  | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5e95abde/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 3411c91..de68c7a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -121,6 +121,11 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testTimeoutQuery() throws Exception {
+        if (HBaseStorage.overwriteStorageQuery != null) {
+            //v1 engine does not suit
+            return;
+        }
+
         thrown.expect(SQLException.class);
 
         //should not break at table duplicate check, should fail at model duplicate check
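
The fix is a conditional skip: when a storage query override is installed (the v1 engine), testTimeoutQuery returns early instead of failing. A standalone JUnit 4 sketch of the same guard pattern, with a local field standing in for HBaseStorage.overwriteStorageQuery:

// Standalone JUnit 4 sketch (not the Kylin test) of guarding a test that only
// applies to one engine: bail out early when the override is present.
import java.sql.SQLException;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class TimeoutQueryGuardSketch {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    // stands in for HBaseStorage.overwriteStorageQuery in the real test
    private static final Object overwriteStorageQuery = null;

    @Test
    public void testTimeoutQuery() throws Exception {
        if (overwriteStorageQuery != null) {
            // the v1 engine does not suit this scenario, skip
            return;
        }
        thrown.expect(SQLException.class);
        throw new SQLException("simulated query timeout");
    }
}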


[41/50] [abbrv] kylin git commit: KYLIN-1818 change kafka dependency to provided

Posted by sh...@apache.org.
KYLIN-1818 change kafka dependency to provided

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4e060e7b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4e060e7b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4e060e7b

Branch: refs/heads/KYLIN-1726
Commit: 4e060e7bc89f757a138dfa5beba2a43209e91371
Parents: 42dafc1
Author: shaofengshi <sh...@apache.org>
Authored: Fri Sep 2 18:58:11 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 build/bin/find-kafka-dependency.sh                      | 12 ++++++------
 build/bin/kylin.sh                                      |  2 ++
 .../kylin/engine/mr/common/AbstractHadoopJob.java       |  4 ++--
 3 files changed, 10 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4e060e7b/build/bin/find-kafka-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-kafka-dependency.sh b/build/bin/find-kafka-dependency.sh
index c6b9c24..7349360 100644
--- a/build/bin/find-kafka-dependency.sh
+++ b/build/bin/find-kafka-dependency.sh
@@ -32,20 +32,20 @@ then
 fi
 
 # works for kafka 9+
-kafka_client=`find -L "$(dirname $kafka_home)" -name 'kafka-clients-[a-z0-9A-Z\.-]*.jar' ! -name '*doc*' ! -name '*test*' ! -name '*sources*' ''-printf '%p:' | sed 's/:$//'`
-if [ -z "$kafka_client" ]
+kafka_dependency=`find -L $kafka_home -name 'kafka-clients-[a-z0-9A-Z\.-]*.jar' ! -name '*doc*' ! -name '*test*' ! -name '*sources*' ''-printf '%p:' | sed 's/:$//'`
+if [ -z "$kafka_dependency" ]
 then
 # works for kafka 8
-    kafka_broker=`find -L "$(dirname $kafka_home)" -name 'kafka_[a-z0-9A-Z\.-]*.jar' ! -name '*doc*' ! -name '*test*' ! -name '*sources*' ''-printf '%p:' | sed 's/:$//'`
-    if [ -z "$kafka_broker" ]
+    kafka_dependency=`find -L $kafka_home -name 'kafka_[a-z0-9A-Z\.-]*.jar' ! -name '*doc*' ! -name '*test*' ! -name '*sources*' ''-printf '%p:' | sed 's/:$//'`
+    if [ -z "$kafka_dependency" ]
     then
         echo "kafka client lib not found"
         exit 1
     else
-        echo "kafka dependency: $kafka_broker"
+        echo "kafka dependency: $kafka_dependency"
         export kafka_dependency
     fi
 else
-    echo "kafka dependency: $kafka_client"
+    echo "kafka dependency: $kafka_dependency"
     export kafka_dependency
 fi

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e060e7b/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index 201b5b6..7a9d2a1 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -31,6 +31,7 @@ function retrieveDependency() {
     #retrive $hive_dependency and $hbase_dependency
     source ${dir}/find-hive-dependency.sh
     source ${dir}/find-hbase-dependency.sh
+    source ${dir}/find-kafka-dependency.sh
 
     #retrive $KYLIN_EXTRA_START_OPTS
     if [ -f "${dir}/setenv.sh" ]
@@ -106,6 +107,7 @@ then
     -Djava.io.tmpdir=${tomcat_root}/temp  \
     -Dkylin.hive.dependency=${hive_dependency} \
     -Dkylin.hbase.dependency=${hbase_dependency} \
+    -Dkylin.kafka.dependency=${kafka_dependency} \
     -Dkylin.rest.address=${kylin_rest_address} \
     -Dspring.profiles.active=${spring_profile} \
     org.apache.hadoop.util.RunJar ${tomcat_root}/bin/bootstrap.jar  org.apache.catalina.startup.Bootstrap start >> ${KYLIN_HOME}/logs/kylin.out 2>&1 & echo $! > ${KYLIN_HOME}/pid &

http://git-wip-us.apache.org/repos/asf/kylin/blob/4e060e7b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index af2ed9f..a138eec 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -226,11 +226,11 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
             }
         }
 
-        // for hive dependencies
+        // for kafka dependencies
         if (kylinKafkaDependency != null) {
             kylinKafkaDependency = kylinKafkaDependency.replace(":", ",");
 
-            logger.info("Kafka Dependencies Before Filtered: " + kylinHiveDependency);
+            logger.info("Kafka Dependencies Before Filtered: " + kylinKafkaDependency);
 
             if (kylinDependency.length() > 0)
                 kylinDependency.append(",");
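
Together with the kylin.sh hunk above, the flow is: the shell scripts export $kafka_dependency and pass it to the JVM as -Dkylin.kafka.dependency, and the job side turns the ':'-separated jar list into the comma-separated form Hadoop expects. A standalone sketch of that hand-off, assuming the property is simply read with System.getProperty (not the actual AbstractHadoopJob code):

// Standalone sketch: read a classpath list passed as
//   -Dkylin.kafka.dependency=/path/a.jar:/path/b.jar
// and normalize it to comma-separated form, mirroring the replace(":", ",")
// seen in the hunk above.
public class KafkaDependencySketch {

    public static void main(String[] args) {
        String kafkaDependency = System.getProperty("kylin.kafka.dependency");
        if (kafkaDependency == null || kafkaDependency.isEmpty()) {
            System.out.println("no kafka dependency configured");
            return;
        }
        String normalized = kafkaDependency.replace(":", ",");
        System.out.println("Kafka Dependencies Before Filtered: " + normalized);
    }
}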


[22/50] [abbrv] kylin git commit: KYLIN-1983 add license header

Posted by sh...@apache.org.
KYLIN-1983 add license header

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c5c85017
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c5c85017
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c5c85017

Branch: refs/heads/KYLIN-1726
Commit: c5c85017c135f03ac72ebbc9d1bb51f796eb0551
Parents: b941f11
Author: shaofengshi <sh...@apache.org>
Authored: Sun Sep 11 09:58:48 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sun Sep 11 10:13:28 2016 +0800

----------------------------------------------------------------------
 .../kylin/job/streaming/StreamDataLoader.java    | 19 ++++++++++++++++++-
 .../model/validation/rule/FunctionRuleTest.java  | 18 ++++++++++++++++++
 .../optrule/AggregateMultipleExpandRule.java     | 18 ++++++++++++++++++
 .../optrule/AggregateProjectReduceRule.java      | 18 ++++++++++++++++++
 .../PasswordPlaceHolderConfigurerTest.java       | 18 ++++++++++++++++++
 tomcat-ext/pom.xml                               | 18 ++++++++++++++++++
 6 files changed, 108 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
index 50fc883..2f7d54d 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/StreamDataLoader.java
@@ -1,6 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.job.streaming;
 
-import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 
 import java.util.List;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
index 48e01e3..e041080 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/model/validation/rule/FunctionRuleTest.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.cube.model.validation.rule;
 
 import org.apache.kylin.common.KylinConfig;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java b/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
index eb7a03d..03a0674 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/AggregateMultipleExpandRule.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.query.optrule;
 
 import com.google.common.base.Predicate;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
----------------------------------------------------------------------
diff --git a/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java b/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
index 8c446e4..f6ac61a 100644
--- a/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
+++ b/query/src/main/java/org/apache/kylin/query/optrule/AggregateProjectReduceRule.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.query.optrule;
 
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
----------------------------------------------------------------------
diff --git a/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java b/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
index 3afd2ca..8f53084 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/security/PasswordPlaceHolderConfigurerTest.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
 package org.apache.kylin.rest.security;
 
 import org.junit.Assert;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c5c85017/tomcat-ext/pom.xml
----------------------------------------------------------------------
diff --git a/tomcat-ext/pom.xml b/tomcat-ext/pom.xml
index 1a171ee..f6af642 100644
--- a/tomcat-ext/pom.xml
+++ b/tomcat-ext/pom.xml
@@ -1,4 +1,22 @@
 <?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 <project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
          xmlns="http://maven.apache.org/POM/4.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">


[34/50] [abbrv] kylin git commit: minor, CacheService prints error (instead of exception) on unknown cache type

Posted by sh...@apache.org.
minor, CacheService prints error (instead of exception) on unknown cache type


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ad5844ba
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ad5844ba
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ad5844ba

Branch: refs/heads/KYLIN-1726
Commit: ad5844babe7473f5116c69bb9d137fc690a8a277
Parents: c8326d7
Author: Li Yang <li...@apache.org>
Authored: Tue Sep 13 18:18:03 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Sep 13 18:18:03 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/rest/service/CacheService.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ad5844ba/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
index cba3286..5e2d544 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CacheService.java
@@ -213,7 +213,7 @@ public class CacheService extends BasicService {
                 removeAllOLAPDataSources();
                 break;
             default:
-                throw new RuntimeException("invalid cacheType:" + cacheType);
+                logger.error("invalid cacheType:" + cacheType);
             }
         } catch (IOException e) {
             throw new RuntimeException("error " + log, e);


[49/50] [abbrv] kylin git commit: KYLIN-1726 Scalable streaming cubing

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index d4308db..7db8285 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -1,37 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TopicMeta.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TopicMeta.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TopicMeta.java
index 4145ef6..d84d3db 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TopicMeta.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TopicMeta.java
@@ -1,37 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka;
 
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/UpdateTimeRangeStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/UpdateTimeRangeStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/UpdateTimeRangeStep.java
new file mode 100644
index 0000000..bb64bf9
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/UpdateTimeRangeStep.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.time.FastDateFormat;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
+import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.ExecutableContext;
+import org.apache.kylin.job.execution.ExecuteResult;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ */
+public class UpdateTimeRangeStep extends AbstractExecutable {
+
+    private static final Logger logger = LoggerFactory.getLogger(UpdateTimeRangeStep.class);
+
+    public UpdateTimeRangeStep() {
+        super();
+    }
+
+    @Override
+    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+        final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
+        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
+        final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
+        final TblColRef partitionCol = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
+        final String outputPath = this.getParams().get(BatchConstants.CFG_OUTPUT_PATH);
+        final Path outputFile = new Path(outputPath, partitionCol.getName());
+
+        String minValue = null, maxValue = null, currentValue = null;
+        try (FileSystem fs = HadoopUtil.getFileSystem(outputPath); FSDataInputStream inputStream = fs.open(outputFile); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
+            minValue = currentValue = bufferedReader.readLine();
+            while (currentValue != null) {
+                maxValue = currentValue;
+                currentValue = bufferedReader.readLine();
+            }
+        } catch (IOException e) {
+            logger.error("fail to read file " + outputFile, e);
+            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+        }
+
+        final DataType partitionColType = partitionCol.getType();
+        FastDateFormat dateFormat;
+        if (partitionColType.isDate()) {
+            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATE_PATTERN);
+        } else if (partitionColType.isDatetime() || partitionColType.isTimestamp()) {
+            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATETIME_PATTERN_WITHOUT_MILLISECONDS);
+        } else if (partitionColType.isStringFamily()) {
+            String partitionDateFormat = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateFormat();
+            if (StringUtils.isEmpty(partitionDateFormat)) {
+                partitionDateFormat = DateFormat.DEFAULT_DATE_PATTERN;
+            }
+            dateFormat = DateFormat.getDateFormat(partitionDateFormat);
+        } else {
+            return new ExecuteResult(ExecuteResult.State.ERROR, "Type " + partitionColType + " is not valid partition column type");
+        }
+
+        try {
+            long startTime = dateFormat.parse(minValue).getTime();
+            long endTime = dateFormat.parse(maxValue).getTime();
+            CubeUpdate cubeBuilder = new CubeUpdate(cube);
+            segment.setDateRangeStart(startTime);
+            segment.setDateRangeEnd(endTime);
+            cubeBuilder.setToUpdateSegs(segment);
+            cubeManager.updateCube(cubeBuilder);
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
+        } catch (Exception e) {
+            logger.error("fail to update cube segment offset", e);
+            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
index 04a66f6..95349c2 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaClusterConfig.java
@@ -22,6 +22,7 @@ import java.util.List;
 
 import javax.annotation.Nullable;
 
+import org.apache.kafka.common.protocol.SecurityProtocol;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
 import org.apache.kylin.common.persistence.Serializer;
@@ -67,7 +68,7 @@ public class KafkaClusterConfig extends RootPersistentEntity {
             @Nullable
             @Override
             public Broker apply(BrokerConfig input) {
-                return new Broker(input.getId(), input.getHost(), input.getPort());
+                return new Broker(input.getId(), input.getHost(), input.getPort(), SecurityProtocol.PLAINTEXT);
             }
         });
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
new file mode 100644
index 0000000..decfb60
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.kafka.hadoop;
+
+import org.apache.kylin.source.kafka.util.KafkaClient;
+import org.apache.kylin.source.kafka.util.KafkaOffsetMapping;
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.source.kafka.KafkaConfigManager;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+
+/**
+ * Run a Hadoop Job to process the stream data in kafka;
+ * Modified from the kafka-hadoop-loader in https://github.com/amient/kafka-hadoop-loader
+ */
+public class KafkaFlatTableJob extends AbstractHadoopJob {
+    protected static final Logger logger = LoggerFactory.getLogger(KafkaFlatTableJob.class);
+
+    public static final String CONFIG_KAFKA_PARITION_MIN = "kafka.partition.min";
+    public static final String CONFIG_KAFKA_PARITION_MAX = "kafka.partition.max";
+    public static final String CONFIG_KAFKA_PARITION_START = "kafka.partition.start.";
+    public static final String CONFIG_KAFKA_PARITION_END = "kafka.partition.end.";
+
+    public static final String CONFIG_KAFKA_BROKERS = "kafka.brokers";
+    public static final String CONFIG_KAFKA_TOPIC = "kafka.topic";
+    public static final String CONFIG_KAFKA_TIMEOUT = "kafka.connect.timeout";
+    public static final String CONFIG_KAFKA_BUFFER_SIZE = "kafka.connect.buffer.size";
+    public static final String CONFIG_KAFKA_CONSUMER_GROUP = "kafka.consumer.group";
+    public static final String CONFIG_KAFKA_INPUT_FORMAT = "input.format";
+    public static final String CONFIG_KAFKA_PARSER_NAME = "kafka.parser.name";
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_CUBE_NAME);
+            options.addOption(OPTION_OUTPUT_PATH);
+            options.addOption(OPTION_SEGMENT_NAME);
+            parseOptions(options, args);
+
+            job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
+            String cubeName = getOptionValue(OPTION_CUBE_NAME);
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+
+            String segmentName = getOptionValue(OPTION_SEGMENT_NAME);
+
+            // ----------------------------------------------------------------------------
+            // add metadata to distributed cache
+            CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+            CubeInstance cube = cubeMgr.getCube(cubeName);
+
+            job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
+            job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_NAME, segmentName);
+            logger.info("Starting: " + job.getJobName());
+
+            setJobClasspath(job, cube.getConfig());
+
+            KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv());
+            KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(cube.getFactTable());
+            String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
+            String topic = kafkaConfig.getTopic();
+
+            if (brokers == null || brokers.length() == 0 || topic == null) {
+                throw new IllegalArgumentException("Invalid Kafka information, brokers " + brokers + ", topic " + topic);
+            }
+
+            job.getConfiguration().set(CONFIG_KAFKA_BROKERS, brokers);
+            job.getConfiguration().set(CONFIG_KAFKA_TOPIC, topic);
+            job.getConfiguration().set(CONFIG_KAFKA_TIMEOUT, String.valueOf(kafkaConfig.getTimeout()));
+            job.getConfiguration().set(CONFIG_KAFKA_BUFFER_SIZE, String.valueOf(kafkaConfig.getBufferSize()));
+            job.getConfiguration().set(CONFIG_KAFKA_INPUT_FORMAT, "json");
+            job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
+            job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_NAME, segmentName);
+            job.getConfiguration().set(CONFIG_KAFKA_PARSER_NAME, kafkaConfig.getParserName());
+            job.getConfiguration().set(CONFIG_KAFKA_CONSUMER_GROUP, cubeName); // use cubeName as consumer group name
+            setupMapper(cube.getSegment(segmentName, SegmentStatusEnum.NEW));
+            job.setNumReduceTasks(0);
+            FileOutputFormat.setOutputPath(job, output);
+            FileOutputFormat.setCompressOutput(job, true);
+            org.apache.log4j.Logger.getRootLogger().info("Output hdfs location: " + output);
+            org.apache.log4j.Logger.getRootLogger().info("Output hdfs compression: " + true);
+            job.getConfiguration().set(BatchConstants.CFG_OUTPUT_PATH, output.toString());
+
+            deletePath(job.getConfiguration(), output);
+
+            attachKylinPropsAndMetadata(cube, job.getConfiguration());
+
+            return waitForCompletion(job);
+
+        } catch (Exception e) {
+            logger.error("error in KafkaFlatTableJob", e);
+            printUsage(options);
+            throw e;
+        } finally {
+            if (job != null)
+                cleanupTempConfFile(job.getConfiguration());
+        }
+
+    }
+
+    private void setupMapper(CubeSegment cubeSeg) throws IOException {
+        // set the segment's offset info to job conf
+        Map<Integer, Long> offsetStart = KafkaOffsetMapping.parseOffsetStart(cubeSeg);
+        Map<Integer, Long> offsetEnd = KafkaOffsetMapping.parseOffsetEnd(cubeSeg);
+
+        Integer minPartition = Collections.min(offsetStart.keySet());
+        Integer maxPartition = Collections.max(offsetStart.keySet());
+        job.getConfiguration().set(CONFIG_KAFKA_PARITION_MIN, minPartition.toString());
+        job.getConfiguration().set(CONFIG_KAFKA_PARITION_MAX, maxPartition.toString());
+
+        for(Integer partition: offsetStart.keySet()) {
+            job.getConfiguration().set(CONFIG_KAFKA_PARITION_START + partition, offsetStart.get(partition).toString());
+            job.getConfiguration().set(CONFIG_KAFKA_PARITION_END + partition, offsetEnd.get(partition).toString());
+        }
+
+        job.setMapperClass(KafkaFlatTableMapper.class);
+        job.setInputFormatClass(KafkaInputFormat.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(Text.class);
+        job.setOutputFormatClass(SequenceFileOutputFormat.class);
+        job.setNumReduceTasks(0);
+    }
+
+    public static void main(String[] args) throws Exception {
+        KafkaFlatTableJob job = new KafkaFlatTableJob();
+        int exitCode = ToolRunner.run(job, args);
+        System.exit(exitCode);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableMapper.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableMapper.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableMapper.java
new file mode 100644
index 0000000..995b2d4
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableMapper.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.kafka.hadoop;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.engine.mr.KylinMapper;
+
+public class KafkaFlatTableMapper extends KylinMapper<LongWritable, BytesWritable, Text, Text> {
+
+    private Text outKey = new Text();
+    private Text outValue = new Text();
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        Configuration conf = context.getConfiguration();
+        bindCurrentConfiguration(conf);
+    }
+
+    @Override
+    public void map(LongWritable key, BytesWritable value, Context context) throws IOException {
+        try {
+            outKey.set(Bytes.toBytes(key.get()));
+            outValue.set(value.getBytes(), 0, value.getLength());
+            context.write(outKey, outValue);
+        } catch (InterruptedException e) {
+            // restore the interrupt flag and surface the failure instead of swallowing it
+            Thread.currentThread().interrupt();
+            throw new IOException("Interrupted while writing mapper output", e);
+        }
+    }
+}
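
The mapper above turns the Kafka offset into an 8-byte Text key and copies the raw message bytes into a Text value before the pair is written to the SequenceFile. A small sketch of that conversion outside MapReduce, reusing the same Hadoop and Kylin utility classes; the JSON payload is only a stand-in for a real Kafka message body:

    import org.apache.hadoop.io.BytesWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.kylin.common.util.Bytes;

    public class FlatTableKeyValueSketch {

        public static void main(String[] args) {
            LongWritable offset = new LongWritable(12345L);
            byte[] payload = Bytes.toBytes("{\"user\":\"u1\",\"qty\":3}");   // stand-in message body
            BytesWritable message = new BytesWritable(payload);

            Text outKey = new Text();
            Text outValue = new Text();
            outKey.set(Bytes.toBytes(offset.get()));                     // 8-byte offset key
            outValue.set(message.getBytes(), 0, message.getLength());    // copy only the valid bytes

            System.out.println("key length = " + outKey.getLength());    // 8
            System.out.println("value = " + outValue.toString());
        }
    }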

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
new file mode 100644
index 0000000..81f6bac
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.kafka.hadoop;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import org.apache.kylin.source.kafka.util.KafkaClient;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputFormat;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.PartitionInfo;
+
+/**
+ * Convert Kafka topic to Hadoop InputFormat
+ * Modified from the kafka-hadoop-loader in https://github.com/amient/kafka-hadoop-loader
+ */
+public class KafkaInputFormat extends InputFormat<LongWritable, BytesWritable> {
+
+    @Override
+    public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
+        Configuration conf = context.getConfiguration();
+
+        String brokers = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_BROKERS);
+        String inputTopic = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TOPIC);
+        String consumerGroup = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_CONSUMER_GROUP);
+        Integer partitionMin = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MIN));
+        Integer partitionMax = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MAX));
+
+        Map<Integer, Long> startOffsetMap = Maps.newHashMap();
+        Map<Integer, Long> endOffsetMap = Maps.newHashMap();
+        for (int i = partitionMin; i <= partitionMax; i++) {
+            String start = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_START + i);
+            String end = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_END + i);
+            if (start != null && end != null) {
+                startOffsetMap.put(i, Long.valueOf(start));
+                endOffsetMap.put(i, Long.valueOf(end));
+            }
+        }
+
+        List<InputSplit> splits = new ArrayList<InputSplit>();
+        try (KafkaConsumer<String, String> consumer = KafkaClient.getKafkaConsumer(brokers, consumerGroup, null)) {
+            List<PartitionInfo> partitionInfos = consumer.partitionsFor(inputTopic);
+            Preconditions.checkArgument(partitionInfos.size() == startOffsetMap.size(), "partition number mismatch with server side");
+            for (int i = 0; i < partitionInfos.size(); i++) {
+                PartitionInfo partition = partitionInfos.get(i);
+                int partitionId = partition.partition();
+                if (!startOffsetMap.containsKey(partitionId)) {
+                    throw new IllegalStateException("Partition '" + partitionId + "' does not exist.");
+                }
+
+                if (endOffsetMap.get(partitionId) > startOffsetMap.get(partitionId)) {
+                    InputSplit split = new KafkaInputSplit(
+                            brokers, inputTopic,
+                            partitionId,
+                            startOffsetMap.get(partitionId), endOffsetMap.get(partitionId)
+                    );
+                    splits.add(split);
+                }
+            }
+        }
+        return splits;
+    }
+
+    @Override
+    public RecordReader<LongWritable, BytesWritable> createRecordReader(
+            InputSplit arg0, TaskAttemptContext arg1) throws IOException,
+            InterruptedException {
+        return new KafkaInputRecordReader();
+    }
+
+}
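
The split rule in getSplits() boils down to: one KafkaInputSplit per partition, and only for partitions whose end offset is strictly greater than their start offset. A sketch of that rule with the Kafka consumer taken out of the picture (broker and topic names are placeholders):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.kylin.source.kafka.hadoop.KafkaInputSplit;

    public class SplitComputationSketch {

        static List<InputSplit> computeSplits(String brokers, String topic,
                                              Map<Integer, Long> start, Map<Integer, Long> end) {
            List<InputSplit> splits = new ArrayList<>();
            for (Map.Entry<Integer, Long> e : start.entrySet()) {
                int partition = e.getKey();
                long startOffset = e.getValue();
                long endOffset = end.get(partition);
                if (endOffset > startOffset) {
                    // only partitions with new data yield a split
                    splits.add(new KafkaInputSplit(brokers, topic, partition, startOffset, endOffset));
                }
            }
            return splits;
        }

        public static void main(String[] args) {
            Map<Integer, Long> start = new HashMap<>();
            Map<Integer, Long> end = new HashMap<>();
            start.put(0, 100L); end.put(0, 250L);   // 150 messages -> one split
            start.put(1, 80L);  end.put(1, 80L);    // nothing new  -> no split
            System.out.println(computeSplits("broker1:9092", "events", start, end));
        }
    }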

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
new file mode 100644
index 0000000..f67fef5
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.kafka.hadoop;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kylin.common.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Record reader that consumes a single Kafka partition over a fixed offset range
+ * Modified from the kafka-hadoop-loader in https://github.com/amient/kafka-hadoop-loader
+ */
+public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWritable> {
+
+    private static final Logger log = LoggerFactory.getLogger(KafkaInputRecordReader.class);
+
+    private Configuration conf;
+
+    private KafkaInputSplit split;
+    private Consumer consumer;
+    private String brokers;
+    private String topic;
+
+    private int partition;
+    private long earliestOffset;
+    private long watermark;
+    private long latestOffset;
+
+    private ConsumerRecords<String, String> messages;
+    private Iterator<ConsumerRecord<String, String>> iterator;
+    private LongWritable key;
+    private BytesWritable value;
+
+    private long timeOut = 60000;
+    private long bufferSize = 65536;
+
+    private long numProcessedMessages = 0L;
+
+    @Override
+    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
+        initialize(split, context.getConfiguration());
+    }
+
+    public void initialize(InputSplit split, Configuration conf) throws IOException, InterruptedException {
+        this.conf = conf;
+        this.split = (KafkaInputSplit) split;
+        brokers = this.split.getBrokers();
+        topic = this.split.getTopic();
+        partition = this.split.getPartition();
+        watermark = this.split.getOffsetStart();
+
+        if (conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TIMEOUT) != null) {
+            timeOut = Long.parseLong(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TIMEOUT));
+        }
+        if (conf.get(KafkaFlatTableJob.CONFIG_KAFKA_BUFFER_SIZE) != null) {
+            bufferSize = Long.parseLong(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_BUFFER_SIZE));
+        }
+
+        String consumerGroup = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_CONSUMER_GROUP);
+        consumer = org.apache.kylin.source.kafka.util.KafkaClient.getKafkaConsumer(brokers, consumerGroup, null);
+
+        earliestOffset = this.split.getOffsetStart();
+        latestOffset = this.split.getOffsetEnd();
+        TopicPartition topicPartition = new TopicPartition(topic, partition);
+        consumer.assign(Arrays.asList(topicPartition));
+        log.info("Split {} Topic: {} Broker: {} Partition: {} Start: {} End: {}", new Object[] { this.split, topic, this.split.getBrokers(), partition, earliestOffset, latestOffset });
+    }
+
+    @Override
+    public boolean nextKeyValue() throws IOException, InterruptedException {
+        if (key == null) {
+            key = new LongWritable();
+        }
+        if (value == null) {
+            value = new BytesWritable();
+        }
+
+        if (messages == null) {
+            log.info("{} fetching offset {} ", topic + ":" + split.getBrokers() + ":" + partition, watermark);
+            TopicPartition topicPartition = new TopicPartition(topic, partition);
+            consumer.seek(topicPartition, watermark);
+            messages = consumer.poll(timeOut);
+            iterator = messages.iterator();
+            if (!iterator.hasNext()) {
+                log.info("No more messages, stop");
+                throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+            }
+        }
+
+        if (iterator.hasNext()) {
+            ConsumerRecord<String, String> message = iterator.next();
+            if (message.offset() >= latestOffset) {
+                log.info("Reach the end offset, stop reading.");
+                return false;
+            }
+            key.set(message.offset());
+            byte[] valuebytes = Bytes.toBytes(message.value());
+            value.set(valuebytes, 0, valuebytes.length);
+            watermark = message.offset() + 1;
+            numProcessedMessages++;
+            if (!iterator.hasNext()) {
+                messages = null;
+                iterator = null;
+            }
+            return true;
+        }
+
+        log.error("Unexpected iterator end.");
+        throw new IOException(String.format("Unexpected ending of stream, expected ending offset %d, but end at %d", latestOffset, watermark));
+    }
+
+    @Override
+    public LongWritable getCurrentKey() throws IOException, InterruptedException {
+        return key;
+    }
+
+    @Override
+    public BytesWritable getCurrentValue() throws IOException, InterruptedException {
+        return value;
+    }
+
+    @Override
+    public float getProgress() throws IOException, InterruptedException {
+        if (watermark >= latestOffset || earliestOffset == latestOffset) {
+            return 1.0f;
+        }
+        return Math.min(1.0f, (watermark - earliestOffset) / (float) (latestOffset - earliestOffset));
+    }
+
+    @Override
+    public void close() throws IOException {
+        log.info("{} num. processed messages {} ", topic + ":" + split.getBrokers() + ":" + partition, numProcessedMessages);
+        consumer.close();
+    }
+
+}
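
The progress reported by the reader is simply the fraction of the offset range [earliestOffset, latestOffset) that the watermark has already passed, clamped to 1.0 and defined as complete for an empty range. A standalone sketch of that arithmetic:

    public class ProgressSketch {

        static float progress(long earliest, long latest, long watermark) {
            if (watermark >= latest || earliest == latest) {
                return 1.0f;
            }
            return Math.min(1.0f, (watermark - earliest) / (float) (latest - earliest));
        }

        public static void main(String[] args) {
            System.out.println(progress(100, 200, 100));   // 0.0  (nothing consumed yet)
            System.out.println(progress(100, 200, 150));   // 0.5
            System.out.println(progress(100, 200, 200));   // 1.0  (watermark reached the end)
        }
    }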

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
new file mode 100644
index 0000000..3261399
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputSplit.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.kafka.hadoop;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapreduce.InputSplit;
+
+/**
+ * Input split describing one Kafka partition and its start/end offsets
+ * Modified from the kafka-hadoop-loader in https://github.com/amient/kafka-hadoop-loader
+ */
+public class KafkaInputSplit extends InputSplit implements Writable {
+
+    private String brokers;
+    private String topic;
+    private int partition;
+    private long offsetStart;
+    private long offsetEnd;
+
+    public KafkaInputSplit() {
+    }
+
+    public KafkaInputSplit(String brokers, String topic, int partition, long offsetStart, long offsetEnd) {
+        this.brokers = brokers;
+        this.topic = topic;
+        this.partition = partition;
+        this.offsetStart = offsetStart;
+        this.offsetEnd = offsetEnd;
+    }
+
+    public void readFields(DataInput in) throws IOException {
+        brokers = Text.readString(in);
+        topic = Text.readString(in);
+        partition = in.readInt();
+        offsetStart = in.readLong();
+        offsetEnd = in.readLong();
+    }
+
+    public void write(DataOutput out) throws IOException {
+        Text.writeString(out, brokers);
+        Text.writeString(out, topic);
+        out.writeInt(partition);
+        out.writeLong(offsetStart);
+        out.writeLong(offsetEnd);
+    }
+
+    @Override
+    public long getLength() throws IOException, InterruptedException {
+        return Long.MAX_VALUE;
+    }
+
+    @Override
+    public String[] getLocations() throws IOException, InterruptedException {
+        return new String[]{brokers};
+    }
+
+    public int getPartition() {
+        return partition;
+    }
+
+    public String getTopic() {
+        return topic;
+    }
+
+    public String getBrokers() {
+        return brokers;
+    }
+
+    public long getOffsetStart() {
+        return offsetStart;
+    }
+
+    public long getOffsetEnd() {
+        return offsetEnd;
+    }
+
+    @Override
+    public String toString() {
+        return brokers + "-" + topic + "-" + partition + "-" + offsetStart + "-" + offsetEnd;
+    }
+}
\ No newline at end of file
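
Since KafkaInputSplit implements Writable, MapReduce serializes it on the job side and rebuilds it on the task side. A sketch of that round trip using plain java.io streams:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.kylin.source.kafka.hadoop.KafkaInputSplit;

    public class SplitSerializationSketch {

        public static void main(String[] args) throws IOException {
            KafkaInputSplit original = new KafkaInputSplit("broker1:9092", "events", 2, 100L, 250L);

            // job side: write() the split into a byte stream
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            original.write(new DataOutputStream(bytes));

            // task side: readFields() restores an equivalent split
            KafkaInputSplit copy = new KafkaInputSplit();
            copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

            System.out.println(original);   // broker1:9092-events-2-100-250
            System.out.println(copy);       // identical output after deserialization
        }
    }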

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
new file mode 100644
index 0000000..640cc53
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka.util;
+
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kylin.source.kafka.config.BrokerConfig;
+import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Helper for creating Kafka consumers and producers with Kylin's default client properties.
+ */
+public class KafkaClient {
+
+    public static KafkaConsumer getKafkaConsumer(String brokers, String consumerGroup, Properties properties) {
+        Properties props = constructDefaultKafkaConsumerProperties(brokers, consumerGroup, properties);
+        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
+        return consumer;
+    }
+
+    public static KafkaProducer getKafkaProducer(String brokers, Properties properties) {
+        Properties props = constructDefaultKafkaProducerProperties(brokers, properties);
+        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
+        return producer;
+    }
+
+    private static Properties constructDefaultKafkaProducerProperties(String brokers, Properties properties){
+        Properties props = new Properties();
+        props.put("bootstrap.servers", brokers);
+        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("acks", "1");
+        props.put("buffer.memory", 33554432);
+        props.put("retries", 0);
+        props.put("batch.size", 16384);
+        props.put("linger.ms", 50);
+        props.put("timeout.ms", "30000");
+        if (properties != null) {
+            for (Map.Entry entry : properties.entrySet()) {
+                props.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return props;
+    }
+
+    private static Properties constructDefaultKafkaConsumerProperties(String brokers, String consumerGroup, Properties properties) {
+        Properties props = new Properties();
+        props.put("bootstrap.servers", brokers);
+        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+        props.put("group.id", consumerGroup);
+        props.put("session.timeout.ms", "30000");
+        props.put("enable.auto.commit", "false");
+        if (properties != null) {
+            for (Map.Entry entry : properties.entrySet()) {
+                props.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return props;
+    }
+
+    public static String getKafkaBrokers(KafkaConfig kafkaConfig) {
+        String brokers = null;
+        for (KafkaClusterConfig clusterConfig : kafkaConfig.getKafkaClusterConfigs()) {
+            for (BrokerConfig brokerConfig : clusterConfig.getBrokerConfigs()) {
+                if (brokers == null) {
+                    brokers = brokerConfig.getHost() + ":" + brokerConfig.getPort();
+                } else {
+                    brokers = brokers + "," + brokerConfig.getHost() + ":" + brokerConfig.getPort();
+                }
+            }
+        }
+        return brokers;
+    }
+
+    public static long getEarliestOffset(KafkaConsumer consumer, String topic, int partitionId) {
+
+        TopicPartition topicPartition = new TopicPartition(topic, partitionId);
+        consumer.assign(Arrays.asList(topicPartition));
+        consumer.seekToBeginning(Arrays.asList(topicPartition));
+
+        return consumer.position(topicPartition);
+    }
+
+    public static long getLatestOffset(KafkaConsumer consumer, String topic, int partitionId) {
+
+        TopicPartition topicPartition = new TopicPartition(topic, partitionId);
+        consumer.assign(Arrays.asList(topicPartition));
+        consumer.seekToEnd(Arrays.asList(topicPartition));
+
+        return consumer.position(topicPartition);
+    }
+
+
+}
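
A sketch of how the helpers above can be combined to probe the readable offset range of a single partition; the broker list, topic and consumer group below are placeholders used only for illustration:

    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kylin.source.kafka.util.KafkaClient;

    public class OffsetRangeSketch {

        public static void main(String[] args) {
            String brokers = "broker1:9092";          // hypothetical broker list
            String topic = "events";                  // hypothetical topic
            int partition = 0;

            KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, "kylin-offset-probe", null);
            try {
                long earliest = KafkaClient.getEarliestOffset(consumer, topic, partition);
                long latest = KafkaClient.getLatestOffset(consumer, topic, partition);
                System.out.println("partition " + partition + " holds offsets [" + earliest + ", " + latest + ")");
            } finally {
                consumer.close();
            }
        }
    }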

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaOffsetMapping.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaOffsetMapping.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaOffsetMapping.java
new file mode 100644
index 0000000..b46e57f
--- /dev/null
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaOffsetMapping.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.source.kafka.util;
+
+import com.google.common.collect.Maps;
+import org.apache.kylin.cube.CubeSegment;
+
+import java.util.Map;
+
+/**
+ * Saves and parses the per-partition Kafka offsets kept in a cube segment's additional info.
+ */
+public class KafkaOffsetMapping {
+
+    public static final String OFFSET_START = "kafka.offset.start.";
+    public static final String OFFSET_END = "kafka.offset.end.";
+
+    /**
+     * Get the start offsets for each partition from a segment
+     *
+     * @param segment
+     * @return
+     */
+    public static Map<Integer, Long> parseOffsetStart(CubeSegment segment) {
+        return parseOffset(segment, OFFSET_START);
+    }
+
+    /**
+     * Get the end offsets for each partition from a segment
+     *
+     * @param segment
+     * @return
+     */
+    public static Map<Integer, Long> parseOffsetEnd(CubeSegment segment) {
+        return parseOffset(segment, OFFSET_END);
+    }
+
+    /**
+     * Save the partition start offset to cube segment
+     *
+     * @param segment
+     * @param offsetStart
+     */
+    public static void saveOffsetStart(CubeSegment segment, Map<Integer, Long> offsetStart) {
+        long sourceOffsetStart = 0;
+        for (Integer partition : offsetStart.keySet()) {
+            segment.getAdditionalInfo().put(OFFSET_START + partition, String.valueOf(offsetStart.get(partition)));
+            sourceOffsetStart += offsetStart.get(partition);
+        }
+
+        segment.setSourceOffsetStart(sourceOffsetStart);
+    }
+
+    /**
+     * Save the partition end offset to cube segment
+     *
+     * @param segment
+     * @param offsetEnd
+     */
+    public static void saveOffsetEnd(CubeSegment segment, Map<Integer, Long> offsetEnd) {
+        long sourceOffsetEnd = 0;
+        for (Integer partition : offsetEnd.keySet()) {
+            segment.getAdditionalInfo().put(OFFSET_END + partition, String.valueOf(offsetEnd.get(partition)));
+            sourceOffsetEnd += offsetEnd.get(partition);
+        }
+
+        segment.setSourceOffsetEnd(sourceOffsetEnd);
+    }
+
+    private static Map<Integer, Long> parseOffset(CubeSegment segment, String propertyPrefix) {
+        final Map<Integer, Long> offsetStartMap = Maps.newHashMap();
+        for (String key : segment.getAdditionalInfo().keySet()) {
+            if (key.startsWith(propertyPrefix)) {
+                Integer partition = Integer.valueOf(key.substring(propertyPrefix.length()));
+                Long offset = Long.valueOf(segment.getAdditionalInfo().get(key));
+                offsetStartMap.put(partition, offset);
+            }
+        }
+
+        return offsetStartMap;
+    }
+}
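
The mapping relies purely on a key naming scheme in the segment's additional-info map: "kafka.offset.start.<partition>" and "kafka.offset.end.<partition>". A sketch of that scheme with a plain HashMap standing in for CubeSegment.getAdditionalInfo():

    import java.util.HashMap;
    import java.util.Map;

    public class OffsetMappingKeySketch {

        static final String OFFSET_START = "kafka.offset.start.";
        static final String OFFSET_END = "kafka.offset.end.";

        public static void main(String[] args) {
            Map<String, String> additionalInfo = new HashMap<>();

            // save side: one key per partition and boundary
            additionalInfo.put(OFFSET_START + 0, "100");
            additionalInfo.put(OFFSET_END + 0, "250");
            additionalInfo.put(OFFSET_START + 1, "80");
            additionalInfo.put(OFFSET_END + 1, "80");

            // parse side, equivalent to parseOffset(segment, OFFSET_START)
            Map<Integer, Long> starts = new HashMap<>();
            for (Map.Entry<String, String> e : additionalInfo.entrySet()) {
                if (e.getKey().startsWith(OFFSET_START)) {
                    starts.put(Integer.valueOf(e.getKey().substring(OFFSET_START.length())),
                            Long.valueOf(e.getValue()));
                }
            }
            System.out.println(starts);   // {0=100, 1=80}
        }
    }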

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java
index 58cba7d..ddc2eb7 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java
@@ -1,37 +1,20 @@
 /*
- *
- *
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *
- *  contributor license agreements. See the NOTICE file distributed with
- *
- *  this work for additional information regarding copyright ownership.
- *
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *
- *  (the "License"); you may not use this file except in compliance with
- *
- *  the License. You may obtain a copy of the License at
- *
- *
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *
- *
- *  Unless required by applicable law or agreed to in writing, software
- *
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *
- *  See the License for the specific language governing permissions and
- *
- *  limitations under the License.
- *
- * /
- */
-
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
 package org.apache.kylin.source.kafka.util;
 
 import java.util.Collections;
@@ -42,6 +25,8 @@ import java.util.concurrent.ConcurrentMap;
 
 import javax.annotation.Nullable;
 
+import kafka.cluster.BrokerEndPoint;
+import org.apache.kafka.common.protocol.SecurityProtocol;
 import org.apache.kylin.source.kafka.TopicMeta;
 import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
 import org.slf4j.Logger;
@@ -86,13 +71,14 @@ public final class KafkaRequester {
         if (consumerCache.containsKey(key)) {
             return consumerCache.get(key);
         } else {
-            consumerCache.putIfAbsent(key, new SimpleConsumer(broker.host(), broker.port(), timeout, bufferSize, clientId));
+            BrokerEndPoint brokerEndPoint = broker.getBrokerEndPoint(SecurityProtocol.PLAINTEXT);
+            consumerCache.putIfAbsent(key, new SimpleConsumer(brokerEndPoint.host(), brokerEndPoint.port(), timeout, bufferSize, clientId));
             return consumerCache.get(key);
         }
     }
 
     private static String createKey(Broker broker, int timeout, int bufferSize, String clientId) {
-        return broker.getConnectionString() + "_" + timeout + "_" + bufferSize + "_" + clientId;
+        return broker.getBrokerEndPoint(SecurityProtocol.PLAINTEXT).connectionString() + "_" + timeout + "_" + bufferSize + "_" + clientId;
     }
 
     public static TopicMeta getKafkaTopicMeta(KafkaClusterConfig kafkaClusterConfig) {
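
The change above adapts to the newer Kafka client, where host and port are no longer read from Broker directly but from the BrokerEndPoint resolved for a security protocol. A compile-only sketch of that lookup (no broker is constructed here, so there is no main method):

    import kafka.cluster.Broker;
    import kafka.cluster.BrokerEndPoint;
    import org.apache.kafka.common.protocol.SecurityProtocol;

    public class BrokerEndPointSketch {

        // equivalent of the new lookups in createKey()/getSimpleConsumer():
        // resolve the PLAINTEXT endpoint first, then read host and port from it
        static String hostAndPort(Broker broker) {
            BrokerEndPoint endPoint = broker.getBrokerEndPoint(SecurityProtocol.PLAINTEXT);
            return endPoint.host() + ":" + endPoint.port();
        }
    }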

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaUtils.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaUtils.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaUtils.java
index 24eaa05..ee5bb20 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaUtils.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaUtils.java
@@ -22,6 +22,7 @@ import java.nio.ByteBuffer;
 import java.util.Iterator;
 import java.util.Map;
 
+import org.apache.kafka.common.protocol.SecurityProtocol;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.StreamingMessage;
 import org.apache.kylin.source.kafka.StreamingParser;
@@ -55,7 +56,7 @@ public final class KafkaUtils {
             if (partitionMetadata.errorCode() != 0) {
                 logger.warn("PartitionMetadata errorCode: " + partitionMetadata.errorCode());
             }
-            return partitionMetadata.leader();
+            return new Broker(partitionMetadata.leader(), SecurityProtocol.PLAINTEXT);
         } else {
             return null;
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index c7de287..f285153 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -39,7 +39,7 @@ import org.apache.kylin.common.util.CompressionUtils;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.LoggableCachedThreadPool;
 import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.cube.ISegment;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTScanRequest;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
index c318cba..da087c9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
@@ -31,7 +31,7 @@ import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.ISegment;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.kv.FuzzyKeyEncoder;
 import org.apache.kylin.cube.kv.FuzzyMaskEncoder;

http://git-wip-us.apache.org/repos/asf/kylin/blob/81c7323b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index f1e5dab..5692000 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.ShardingHash;
-import org.apache.kylin.cube.ISegment;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.dimension.DimensionEncoding;


[43/50] [abbrv] kylin git commit: KYLIN-1726 add test case BuildCubeWithStream2

Posted by sh...@apache.org.
KYLIN-1726 add test case BuildCubeWithStream2

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3e081b3f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3e081b3f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3e081b3f

Branch: refs/heads/KYLIN-1726
Commit: 3e081b3fbec4fc8a6cc4ddf8795d2fd581ae04f4
Parents: aff2df5
Author: shaofengshi <sh...@apache.org>
Authored: Wed Sep 14 16:27:33 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../kylin/job/streaming/Kafka10DataLoader.java  |   4 -
 .../apache/kylin/common/KylinConfigBase.java    |   4 +
 .../java/org/apache/kylin/cube/CubeManager.java |  28 +-
 .../org/apache/kylin/job/dao/ExecutableDao.java |   1 +
 .../kylin/job/manager/ExecutableManager.java    |   2 +-
 .../streaming/cube/StreamingCubeBuilder.java    |   2 +-
 .../test_streaming_table_cube_desc.json         |   3 +-
 .../kylin/provision/BuildCubeWithStream.java    |  32 ++-
 .../kylin/provision/BuildCubeWithStream2.java   | 274 +++++++++++++++++++
 .../kylin/rest/controller/CubeController.java   |   8 +-
 .../apache/kylin/rest/service/JobService.java   |   4 +-
 .../kylin/source/kafka/SeekOffsetStep.java      |   7 +-
 12 files changed, 320 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
index a5132af..2b299cc 100644
--- a/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
+++ b/assembly/src/test/java/org/apache/kylin/job/streaming/Kafka10DataLoader.java
@@ -65,13 +65,9 @@ public class Kafka10DataLoader extends StreamDataLoader {
         props.put("retry.backoff.ms", "1000");
         KafkaProducer producer = KafkaClient.getKafkaProducer(brokerList, props);
 
-        int boundary = messages.size() / 10;
         for (int i = 0; i < messages.size(); ++i) {
             ProducerRecord<String, String> keyedMessage = new ProducerRecord<String, String>(clusterConfig.getTopic(), String.valueOf(i), messages.get(i));
             producer.send(keyedMessage);
-            if (i % boundary == 0) {
-                logger.info("sending " + i + " messages to " + this.toString());
-            }
         }
         logger.info("sent " + messages.size() + " messages to " + this.toString());
         producer.close();
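
For context, a sketch of the loader's send loop using the shared KafkaClient producer: each message is keyed by its index and pushed to the topic. The broker list and topic name below are placeholders:

    import java.util.List;
    import java.util.Properties;

    import com.google.common.collect.Lists;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kylin.source.kafka.util.KafkaClient;

    public class SendMessagesSketch {

        public static void main(String[] args) {
            String brokerList = "broker1:9092";       // hypothetical broker list
            String topic = "events";                  // hypothetical topic
            List<String> messages = Lists.newArrayList("{\"qty\":1}", "{\"qty\":2}");

            Properties props = new Properties();
            props.put("retry.backoff.ms", "1000");
            KafkaProducer producer = KafkaClient.getKafkaProducer(brokerList, props);
            try {
                for (int i = 0; i < messages.size(); i++) {
                    producer.send(new ProducerRecord<String, String>(topic, String.valueOf(i), messages.get(i)));
                }
            } finally {
                producer.close();
            }
        }
    }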

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index fafb1fc..3b06ed8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -809,4 +809,8 @@ abstract public class KylinConfigBase implements Serializable {
     public String getCreateFlatHiveTableMethod() {
         return getOptional("kylin.hive.create.flat.table.method", "1");
     }
+
+    public int getMaxBuildingSegments() {
+        return Integer.parseInt(getOptional("kylin.cube.building.segment.max", "1"));
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index d494fcc..57b9510 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -400,13 +400,8 @@ public class CubeManager implements IRealizationProvider {
     }
 
     public CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
-        return appendSegment(cube, startDate, endDate, startOffset, endOffset, true);
-    }
-
-    public CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, boolean strictChecking) throws IOException {
 
-        if (strictChecking)
-            checkNoBuildingSegment(cube);
+        checkBuildingSegment(cube);
 
         if (cube.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
             // try figure out a reasonable start if missing
@@ -436,12 +431,9 @@ public class CubeManager implements IRealizationProvider {
         updateCube(cubeBuilder);
         return newSegment;
     }
-    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
-        return refreshSegment(cube, startDate, endDate, startOffset, endOffset, true);
-    }
 
-    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, boolean strictChecking) throws IOException {
-        checkNoBuildingSegment(cube);
+    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
+        checkBuildingSegment(cube);
 
         CubeSegment newSegment = newSegment(cube, startDate, endDate, startOffset, endOffset);
 
@@ -462,7 +454,7 @@ public class CubeManager implements IRealizationProvider {
         if (startDate >= endDate && startOffset >= endOffset)
             throw new IllegalArgumentException("Invalid merge range");
 
-        checkNoBuildingSegment(cube);
+        checkBuildingSegment(cube);
         checkCubeIsPartitioned(cube);
 
         boolean isOffsetsOn = cube.getSegments().get(0).isSourceOffsetsOn();
@@ -588,9 +580,10 @@ public class CubeManager implements IRealizationProvider {
         }
     }
 
-    private void checkNoBuildingSegment(CubeInstance cube) {
-        if (cube.getBuildingSegments().size() > 0) {
-            throw new IllegalStateException("There is already a building segment!");
+    private void checkBuildingSegment(CubeInstance cube) {
+        int maxBuildingSeg = cube.getConfig().getMaxBuildingSegments();
+        if (cube.getBuildingSegments().size() >= maxBuildingSeg) {
+            throw new IllegalStateException("There are already " + cube.getBuildingSegments().size() + " building segments, reaching the limit of " + maxBuildingSeg);
         }
     }
 
@@ -729,8 +722,9 @@ public class CubeManager implements IRealizationProvider {
         }
 
         for (CubeSegment seg : tobe) {
-            if (isReady(seg) == false)
-                throw new IllegalStateException("For cube " + cube + ", segment " + seg + " should be READY but is not");
+            if (!isReady(seg)) {
+                logger.warn("For cube " + cube + ", segment " + seg + " isn't READY yet.");
+            }
         }
 
         List<CubeSegment> toRemoveSegs = Lists.newArrayList();
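
The net effect of the checkBuildingSegment() change is that appending or refreshing a segment is rejected only once the per-cube limit kylin.cube.building.segment.max (default 1) is reached, instead of whenever any segment is building. A plain-Java sketch of that check:

    public class BuildingSegmentCheckSketch {

        static void checkBuildingSegment(int currentlyBuilding, int maxBuildingSegments) {
            if (currentlyBuilding >= maxBuildingSegments) {
                throw new IllegalStateException("There are already " + currentlyBuilding
                        + " building segments, reaching the limit of " + maxBuildingSegments);
            }
        }

        public static void main(String[] args) {
            checkBuildingSegment(2, 3);       // allowed: below the limit
            try {
                checkBuildingSegment(3, 3);   // rejected: limit reached
            } catch (IllegalStateException expected) {
                System.out.println(expected.getMessage());
            }
        }
    }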

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
index 8808a56..5cae5ac 100644
--- a/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
+++ b/core-job/src/main/java/org/apache/kylin/job/dao/ExecutableDao.java
@@ -207,6 +207,7 @@ public class ExecutableDao {
     }
 
     public void updateJobOutput(ExecutableOutputPO output) throws PersistentException {
+        logger.debug("updating job output, id: " + output.getUuid());
         try {
             final long ts = writeJobOutputResource(pathOfJobOutput(output.getUuid()), output);
             output.setLastModified(ts);

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
index 3a19486..d42b924 100644
--- a/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
@@ -278,7 +278,7 @@ public class ExecutableManager {
             ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus());
             if (newStatus != null && oldStatus != newStatus) {
                 if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) {
-                    throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus);
+                    throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus + ", job id: " + jobId);
                 }
                 jobOutput.setStatus(newStatus.toString());
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index 180f0b8..a42ec05 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -119,7 +119,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
         CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         final CubeInstance cubeInstance = cubeManager.reloadCubeLocal(cubeName);
         try {
-            CubeSegment segment = cubeManager.appendSegment(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), 0, 0, false);
+            CubeSegment segment = cubeManager.appendSegment(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), 0, 0);
             segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
             segment.setInputRecords(streamingBatch.getMessages().size());
             segment.setLastBuildTime(System.currentTimeMillis());

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
index ef10c1e..8279417 100644
--- a/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
+++ b/examples/test_case_data/localmeta/cube_desc/test_streaming_table_cube_desc.json
@@ -106,7 +106,8 @@
     }
   } ],
   "override_kylin_properties": {
-    "kylin.cube.algorithm": "inmem"
+    "kylin.cube.algorithm": "inmem",
+    "kylin.cube.building.segment.max": "3"
   },
   "notify_list" : [ ],
   "status_need_notify" : [ ],

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 2c09f48..53c9dce 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -138,15 +138,21 @@ public class BuildCubeWithStream {
 
         int numberOfRecrods1 = 10000;
         generateStreamData(date1, date2, numberOfRecrods1);
-        buildSegment(cubeName, 0, Long.MAX_VALUE);
-
+        ExecutableState result = buildSegment(cubeName, 0, Long.MAX_VALUE);
+        Assert.assertTrue(result == ExecutableState.SUCCEED);
         long date3 = f.parse("2013-04-01").getTime();
-        int numberOfRecrods2 = 5000;
-        generateStreamData(date2, date3, numberOfRecrods2);
-        buildSegment(cubeName, 0, Long.MAX_VALUE);
+        int numberOfRecords2 = 5000;
+        generateStreamData(date2, date3, numberOfRecords2);
+        result = buildSegment(cubeName, 0, Long.MAX_VALUE);
+        Assert.assertTrue(result == ExecutableState.SUCCEED);
+
+        //empty build
+        result = buildSegment(cubeName, 0, Long.MAX_VALUE);
+        Assert.assertTrue(result == ExecutableState.DISCARDED);
 
         //merge
-        mergeSegment(cubeName, 0, 15000);
+        result = mergeSegment(cubeName, 0, 15000);
+        Assert.assertTrue(result == ExecutableState.SUCCEED);
 
         List<CubeSegment> segments = cubeManager.getCube(cubeName).getSegments();
         Assert.assertTrue(segments.size() == 1);
@@ -160,16 +166,16 @@ public class BuildCubeWithStream {
 
     }
 
-    private String mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+    private ExecutableState mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
         CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
-        return job.getId();
+        return job.getStatus();
     }
 
     private String refreshSegment(String cubeName, long startOffset, long endOffset, HashMap<String, String> partitionOffsetMap) throws Exception {
-        CubeSegment segment = cubeManager.refreshSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
+        CubeSegment segment = cubeManager.refreshSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
         segment.setAdditionalInfo(partitionOffsetMap);
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         CubeUpdate cubeBuilder = new CubeUpdate(cubeInstance);
@@ -182,12 +188,12 @@ public class BuildCubeWithStream {
         return job.getId();
     }
 
-    private String buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
+    private ExecutableState buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
-        return job.getId();
+        return job.getStatus();
     }
 
     protected void deployEnv() throws IOException {
@@ -217,7 +223,7 @@ public class BuildCubeWithStream {
     protected void waitForJob(String jobId) {
         while (true) {
             AbstractExecutable job = jobService.getJob(jobId);
-            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR || job.getStatus() == ExecutableState.DISCARDED) {
                 break;
             } else {
                 try {
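
buildSegment() and mergeSegment() now report the job's final ExecutableState, and the wait loop treats DISCARDED (the outcome of an empty incremental build) as terminal alongside SUCCEED and ERROR. A sketch of that terminal-state test; RUNNING stands in here for any non-terminal state:

    import org.apache.kylin.job.execution.ExecutableState;

    public class TerminalStateSketch {

        static boolean isFinished(ExecutableState state) {
            return state == ExecutableState.SUCCEED
                    || state == ExecutableState.ERROR
                    || state == ExecutableState.DISCARDED;
        }

        public static void main(String[] args) {
            System.out.println(isFinished(ExecutableState.RUNNING));     // false, keep polling
            System.out.println(isFinished(ExecutableState.DISCARDED));   // true, empty build was dropped
        }
    }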

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream2.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream2.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream2.java
new file mode 100644
index 0000000..d48a473
--- /dev/null
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream2.java
@@ -0,0 +1,274 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.provision;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.Random;
+import java.util.TimeZone;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.collect.Lists;
+import org.I0Itec.zkclient.ZkConnection;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kafka.common.requests.MetadataResponse;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.ClassUtil;
+import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.CubeUpdate;
+import org.apache.kylin.engine.EngineFactory;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.job.DeployUtil;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.job.streaming.Kafka10DataLoader;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.source.kafka.KafkaConfigManager;
+import org.apache.kylin.source.kafka.config.BrokerConfig;
+import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.lang.Thread.sleep;
+
+/**
+ *  Streaming cubing test for "test_streaming_table", building the cube concurrently from multiple threads.
+ */
+public class BuildCubeWithStream2 {
+
+    private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStream2.class);
+
+    private CubeManager cubeManager;
+    private DefaultScheduler scheduler;
+    protected ExecutableManager jobService;
+    private static final String cubeName = "test_streaming_table_cube";
+
+    private KafkaConfig kafkaConfig;
+    private MockKafka kafkaServer;
+    private static volatile boolean generateData = true; // written by build(), read by the data-generator thread
+
+    public void before() throws Exception {
+        deployEnv();
+
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        jobService = ExecutableManager.getInstance(kylinConfig);
+        scheduler = DefaultScheduler.createInstance();
+        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        cubeManager = CubeManager.getInstance(kylinConfig);
+
+        final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+        final String factTable = cubeInstance.getFactTable();
+
+        final StreamingManager streamingManager = StreamingManager.getInstance(kylinConfig);
+        final StreamingConfig streamingConfig = streamingManager.getStreamingConfig(factTable);
+        kafkaConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(streamingConfig.getName());
+
+        String topicName = UUID.randomUUID().toString();
+        String localIp = NetworkUtils.getLocalIp();
+        BrokerConfig brokerConfig = kafkaConfig.getKafkaClusterConfigs().get(0).getBrokerConfigs().get(0);
+        brokerConfig.setHost(localIp);
+        kafkaConfig.setTopic(topicName);
+        KafkaConfigManager.getInstance(kylinConfig).saveKafkaConfig(kafkaConfig);
+
+        startEmbeddedKafka(topicName, brokerConfig);
+    }
+
+    private void startEmbeddedKafka(String topicName, BrokerConfig brokerConfig) {
+        // Start mock Kafka
+        String zkConnectionStr = "sandbox:2181";
+        ZkConnection zkConnection = new ZkConnection(zkConnectionStr);
+        // Assert.assertEquals(ZooKeeper.States.CONNECTED, zkConnection.getZookeeperState());
+        kafkaServer = new MockKafka(zkConnection, brokerConfig.getPort(), brokerConfig.getId());
+        kafkaServer.start();
+
+        kafkaServer.createTopic(topicName, 3, 1);
+        kafkaServer.waitTopicUntilReady(topicName);
+
+        MetadataResponse.TopicMetadata topicMetadata = kafkaServer.fetchTopicMeta(topicName);
+        Assert.assertEquals(topicName, topicMetadata.topic());
+    }
+
+    private void generateStreamData(long startTime, long endTime, int numberOfRecords) throws IOException {
+        if (numberOfRecords <= 0)
+            return;
+        Kafka10DataLoader dataLoader = new Kafka10DataLoader(kafkaConfig);
+        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, numberOfRecords, cubeName, dataLoader);
+        logger.info("Test data inserted into Kafka");
+    }
+
+    private void clearSegment(String cubeName) throws Exception {
+        CubeInstance cube = cubeManager.getCube(cubeName);
+        // remove all existing segments
+        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+        cubeBuilder.setToRemoveSegs(cube.getSegments().toArray(new CubeSegment[cube.getSegments().size()]));
+        cubeManager.updateCube(cubeBuilder);
+    }
+
+    public void build() throws Exception {
+        clearSegment(cubeName);
+        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+        f.setTimeZone(TimeZone.getTimeZone("GMT"));
+        final long date1 = 0;
+        final long date2 = f.parse("2013-01-01").getTime();
+
+        new Thread(new Runnable() {
+            @Override
+            public void run() {
+
+                Random rand = new Random();
+                while (generateData) {
+                    try {
+                        generateStreamData(date1, date2, rand.nextInt(100));
+                        sleep(rand.nextInt(100 * 1000)); // wait a random time, up to 100 seconds
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }).start();
+        ExecutorService executorService = Executors.newFixedThreadPool(4);
+
+        List<FutureTask<ExecutableState>> futures = Lists.newArrayList();
+        for (int i = 0; i < 5; i++) {
+            FutureTask futureTask = new FutureTask(new Callable<ExecutableState>() {
+                @Override
+                public ExecutableState call() {
+                    ExecutableState result = null;
+                    try {
+                        result = buildSegment(cubeName, 0, Long.MAX_VALUE);
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+
+                    return result;
+                }
+            });
+
+            executorService.submit(futureTask);
+            futures.add(futureTask);
+            Thread.sleep(2 * 60 * 1000); // sleep 2 minutes between submissions
+        }
+
+        generateData = false; // stop generating message to kafka
+        executorService.shutdown();
+        int succeedBuild = 0;
+        for (int i = 0; i < futures.size(); i++) {
+            ExecutableState result = futures.get(i).get(20, TimeUnit.MINUTES);
+            logger.info("Checking building task " + i + " whose state is " + result);
+            Assert.assertTrue(result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED);
+            if (result == ExecutableState.SUCCEED)
+                succeedBuild++;
+        }
+
+        logger.info(succeedBuild + " build jobs have been successfully completed.");
+        List<CubeSegment> segments = cubeManager.getCube(cubeName).getSegments(SegmentStatusEnum.READY);
+        Assert.assertTrue(segments.size() == succeedBuild);
+
+    }
+
+
+    private ExecutableState buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
+        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
+        DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
+        jobService.addJob(job);
+        waitForJob(job.getId());
+        return job.getStatus();
+    }
+
+    protected void deployEnv() throws IOException {
+        DeployUtil.overrideJobJarLocations();
+        DeployUtil.initCliWorkDir();
+        DeployUtil.deployMetadata();
+    }
+
+    public static void beforeClass() throws Exception {
+        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
+        System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
+        if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.2.4.2-2");
+        }
+        HBaseMetadataTestCase.staticCreateTestMetadata(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
+    }
+
+    public static void afterClass() throws Exception {
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
+    }
+
+    public void after() {
+        kafkaServer.stop();
+    }
+
+    protected void waitForJob(String jobId) {
+        while (true) {
+            AbstractExecutable job = jobService.getJob(jobId);
+            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR || job.getStatus() == ExecutableState.DISCARDED) {
+                break;
+            } else {
+                try {
+                    sleep(5000);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        try {
+            beforeClass();
+
+            BuildCubeWithStream2 buildCubeWithStream = new BuildCubeWithStream2();
+            buildCubeWithStream.before();
+            buildCubeWithStream.build();
+            logger.info("Build is done");
+            buildCubeWithStream.after();
+            afterClass();
+            logger.info("Going to exit");
+            System.exit(0);
+        } catch (Exception e) {
+            logger.error("error", e);
+            System.exit(1);
+        }
+
+    }
+
+}
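
The test above boils down to a fan-out/fan-in pattern: submit several offset-based builds to a fixed thread pool, then wait on each Future for a terminal job state. A minimal standalone sketch of that pattern (the class name, JobState enum and submitBuildOnce() are illustrative stand-ins, not part of this commit):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;

    public class ConcurrentBuildSketch {

        // stand-in for Kylin's ExecutableState
        enum JobState { SUCCEED, DISCARDED, ERROR }

        // stand-in for buildSegment(cubeName, 0, Long.MAX_VALUE) in the test above
        static JobState submitBuildOnce() throws Exception {
            return JobState.SUCCEED;
        }

        public static void main(String[] args) throws Exception {
            ExecutorService pool = Executors.newFixedThreadPool(4);
            List<Future<JobState>> futures = new ArrayList<Future<JobState>>();
            for (int i = 0; i < 5; i++) {
                futures.add(pool.submit(new Callable<JobState>() {
                    @Override
                    public JobState call() throws Exception {
                        return submitBuildOnce();
                    }
                }));
            }
            pool.shutdown();
            for (Future<JobState> f : futures) {
                // same 20-minute per-job wait as the test; SUCCEED or DISCARDED are acceptable outcomes
                JobState state = f.get(20, TimeUnit.MINUTES);
                System.out.println("build finished with state " + state);
            }
        }
    }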

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 669f53e..42b117c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -272,7 +272,7 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild", method = { RequestMethod.PUT })
     @ResponseBody
     public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest req) {
-        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, req.getBuildType(), true, req.isForce() || req.isForceMergeEmptySegment());
+        return buildInternal(cubeName, req.getStartTime(), req.getEndTime(), 0, 0, req.getBuildType(), req.isForce() || req.isForceMergeEmptySegment());
     }
 
     /** Build/Rebuild a cube segment by source offset */
@@ -286,16 +286,16 @@ public class CubeController extends BasicController {
     @RequestMapping(value = "/{cubeName}/rebuild2", method = { RequestMethod.PUT })
     @ResponseBody
     public JobInstance rebuild(@PathVariable String cubeName, @RequestBody JobBuildRequest2 req) {
-        return buildInternal(cubeName, 0, 0, req.getStartSourceOffset(), req.getEndSourceOffset(), req.getBuildType(), false, req.isForce());
+        return buildInternal(cubeName, 0, 0, req.getStartSourceOffset(), req.getEndSourceOffset(), req.getBuildType(), req.isForce());
     }
 
     private JobInstance buildInternal(String cubeName, long startTime, long endTime, //
-            long startOffset, long endOffset, String buildType, boolean strictCheck, boolean force) {
+            long startOffset, long endOffset, String buildType, boolean force) {
         try {
             String submitter = SecurityContextHolder.getContext().getAuthentication().getName();
             CubeInstance cube = jobService.getCubeManager().getCube(cubeName);
             return jobService.submitJob(cube, startTime, endTime, startOffset, endOffset, //
-                    CubeBuildTypeEnum.valueOf(buildType), strictCheck, force, submitter);
+                    CubeBuildTypeEnum.valueOf(buildType), force, submitter);
         } catch (Exception e) {
             logger.error(e.getLocalizedMessage(), e);
             throw new InternalErrorException(e.getLocalizedMessage());

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index 8929bf1..5c704ba 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -199,7 +199,7 @@ public class JobService extends BasicService {
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, //
-            CubeBuildTypeEnum buildType, boolean strictCheck, boolean force, String submitter) throws IOException, JobException {
+            CubeBuildTypeEnum buildType, boolean force, String submitter) throws IOException, JobException {
 
         if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
             throw new BadRequestException("Broken cube " + cube.getName() + " can't be built");
@@ -211,7 +211,7 @@ public class JobService extends BasicService {
         DefaultChainedExecutable job;
 
         if (buildType == CubeBuildTypeEnum.BUILD) {
-            CubeSegment newSeg = getCubeManager().appendSegment(cube, startDate, endDate, startOffset, endOffset, strictCheck);
+            CubeSegment newSeg = getCubeManager().appendSegment(cube, startDate, endDate, startOffset, endOffset);
             job = EngineFactory.createBatchCubingJob(newSeg, submitter);
         } else if (buildType == CubeBuildTypeEnum.MERGE) {
             CubeSegment newSeg = getCubeManager().mergeSegments(cube, startDate, endDate, startOffset, endOffset, force);
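
After this change both REST entry points thread only the force flag through to job submission; the strictCheck argument is gone from buildInternal() and submitJob(). A hedged sketch of driving the trimmed-down API (the helper method, offsets and submitter name are placeholders, not from the commit):

    // Hedged sketch: submitting an offset-based build with the new submitJob(...) signature.
    JobInstance submitOffsetBuild(JobService jobService, CubeInstance cube,
                                  long startOffset, long endOffset) throws Exception {
        return jobService.submitJob(cube,
                0L, 0L,                  // startDate / endDate, unused for an offset-based build
                startOffset, endOffset,  // Kafka source offsets
                CubeBuildTypeEnum.BUILD,
                false,                   // force
                "ADMIN");                // submitter (placeholder)
    }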

http://git-wip-us.apache.org/repos/asf/kylin/blob/3e081b3f/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
index 479f1b8..9369e6f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/SeekOffsetStep.java
@@ -17,10 +17,6 @@
 */
 package org.apache.kylin.source.kafka;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
-import com.google.common.collect.Maps;
-import org.apache.commons.math3.util.MathUtils;
 import org.apache.kylin.source.kafka.util.KafkaClient;
 import org.apache.kylin.source.kafka.util.KafkaOffsetMapping;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
@@ -38,7 +34,6 @@ import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -125,7 +120,7 @@ public class SeekOffsetStep extends AbstractExecutable {
             } catch (IOException e) {
                 return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
             }
-            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed, offset start: " + totalStartOffset + ", offset end: " + totalEndOffset);
         } else {
             CubeUpdate cubeBuilder = new CubeUpdate(cube);
             cubeBuilder.setToRemoveSegs(segment);


[36/50] [abbrv] kylin git commit: minor, fix license

Posted by sh...@apache.org.
minor, fix license


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/823d98af
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/823d98af
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/823d98af

Branch: refs/heads/KYLIN-1726
Commit: 823d98af75dbc8d31af2269aff9045e96900bb43
Parents: dfa3048
Author: lidongsjtu <li...@apache.org>
Authored: Tue Sep 13 23:04:40 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Sep 13 23:04:40 2016 +0800

----------------------------------------------------------------------
 .travis.yml | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/823d98af/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 7f4b7de..16ba4c1 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,3 +1,21 @@
+# Configuration file for Travis continuous integration.
+# See https://travis-ci.org/apache/kylin
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 language: java
 
 jdk:


[08/50] [abbrv] kylin git commit: KYLIN-1922 optimize needStorageAggregation check logic and make sure self-termination in coprocessor works

Posted by sh...@apache.org.
KYLIN-1922 optimize needStorageAggregation check logic and make sure self-termination in coprocessor works


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e38557b4
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e38557b4
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e38557b4

Branch: refs/heads/KYLIN-1726
Commit: e38557b4d1cd1d42fe042e5500020cbfaba2d80b
Parents: e87c816
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 15:57:25 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 16:42:33 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |  15 +-
 .../apache/kylin/cube/RawQueryLastHacker.java   |   7 +-
 .../cube/gridtable/CubeScanRangePlanner.java    | 340 ----------
 .../kylin/gridtable/GTAggregateScanner.java     |  10 +-
 .../apache/kylin/gridtable/GTFilterScanner.java |   6 +-
 .../GTScanExceedThresholdException.java         |   2 +-
 .../apache/kylin/gridtable/GTScanRequest.java   |  34 +-
 .../GTScanSelfTerminatedException.java          |  26 +
 .../kylin/gridtable/GTScanTimeoutException.java |   2 +-
 .../gridtable/AggregationCacheSpillTest.java    |   6 +-
 .../kylin/gridtable/DictGridTableTest.java      | 617 ------------------
 .../storage/gtrecord/CubeScanRangePlanner.java  | 357 +++++++++++
 .../storage/gtrecord/CubeSegmentScanner.java    |  14 +-
 .../gtrecord/GTCubeStorageQueryBase.java        |  36 +-
 .../storage/gtrecord/DictGridTableTest.java     | 626 +++++++++++++++++++
 .../apache/kylin/query/ITKylinQueryTest.java    |  55 +-
 .../resources/query/sql_timeout/query01.sql     |  19 +
 .../common/coprocessor/CoprocessorBehavior.java |   1 +
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |   8 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   2 +-
 .../hbase/cube/v2/ExpectedSizeIterator.java     |  39 +-
 .../hbase/cube/v2/HBaseReadonlyStore.java       |  11 +-
 .../coprocessor/endpoint/CubeVisitService.java  |  26 +-
 23 files changed, 1197 insertions(+), 1062 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index f0c91da..2ac9d48 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -481,8 +481,8 @@ abstract public class KylinConfigBase implements Serializable {
         return Integer.parseInt(getOptional("kylin.query.scan.threshold", "10000000"));
     }
 
-    public int getCubeVisitTimeoutTimes() {
-        return Integer.parseInt(getOptional("kylin.query.cube.visit.timeout.times", "1"));
+    public float getCubeVisitTimeoutTimes() {
+        return Float.parseFloat(getOptional("kylin.query.cube.visit.timeout.times", "1"));
     }
 
     public int getBadQueryStackTraceDepth() {
@@ -545,15 +545,6 @@ abstract public class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(this.getOptional("kylin.query.ignore_unknown_function", "false"));
     }
 
-    public String getQueryStorageVisitPlanner() {
-        return this.getOptional("kylin.query.storage.visit.planner", "org.apache.kylin.cube.gridtable.CubeScanRangePlanner");
-    }
-
-    // for test only
-    public void setQueryStorageVisitPlanner(String v) {
-        setProperty("kylin.query.storage.visit.planner", v);
-    }
-
     public int getQueryScanFuzzyKeyMax() {
         return Integer.parseInt(this.getOptional("kylin.query.scan.fuzzykey.max", "200"));
     }
@@ -573,7 +564,7 @@ abstract public class KylinConfigBase implements Serializable {
     public boolean getQueryMetricsEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.query.metrics.enabled", "false"));
     }
-    
+
     public int[] getQueryMetricsPercentilesIntervals() {
         String[] dft = { "60", "300", "3600" };
         return getOptionalIntArray("kylin.query.metrics.percentiles.intervals", dft);
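
Because the multiplier is now parsed as a float, fractional values become valid in kylin.properties; for example (the value is purely illustrative):

    kylin.query.cube.visit.timeout.times=1.5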

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java b/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
index 63ddac5..50c644e 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/RawQueryLastHacker.java
@@ -44,13 +44,14 @@ public class RawQueryLastHacker {
         // We need to retrieve cube to manually add columns into sqlDigest, so that we have full-columns results as output.
         boolean isSelectAll = sqlDigest.allColumns.isEmpty() || sqlDigest.allColumns.equals(sqlDigest.filterColumns);
         for (TblColRef col : cubeDesc.listAllColumns()) {
-            if (col.getTable().equals(sqlDigest.factTable) && (cubeDesc.listDimensionColumnsIncludingDerived().contains(col) || isSelectAll)) {
-                sqlDigest.allColumns.add(col);
+            if (cubeDesc.listDimensionColumnsIncludingDerived().contains(col) || isSelectAll) {
+                if (col.getTable().equals(sqlDigest.factTable))
+                    sqlDigest.allColumns.add(col);
             }
         }
 
         for (TblColRef col : sqlDigest.allColumns) {
-            if (cubeDesc.listDimensionColumnsIncludingDerived().contains(col)) {
+            if (cubeDesc.listDimensionColumnsExcludingDerived(true).contains(col)) {
                 // For dimension columns, take them as group by columns.
                 sqlDigest.groupbyColumns.add(col);
             } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeScanRangePlanner.java b/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeScanRangePlanner.java
deleted file mode 100644
index a937045..0000000
--- a/core-cube/src/main/java/org/apache/kylin/cube/gridtable/CubeScanRangePlanner.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.cube.gridtable;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.debug.BackdoorToggles;
-import org.apache.kylin.common.util.ByteArray;
-import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.common.FuzzyValueCombination;
-import org.apache.kylin.cube.cuboid.Cuboid;
-import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.gridtable.GTInfo;
-import org.apache.kylin.gridtable.GTRecord;
-import org.apache.kylin.gridtable.GTScanRange;
-import org.apache.kylin.gridtable.GTScanRequest;
-import org.apache.kylin.gridtable.GTScanRequestBuilder;
-import org.apache.kylin.gridtable.GTUtil;
-import org.apache.kylin.gridtable.IGTComparator;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.model.FunctionDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-
-public class CubeScanRangePlanner extends ScanRangePlannerBase {
-
-    private static final Logger logger = LoggerFactory.getLogger(CubeScanRangePlanner.class);
-
-    protected int maxScanRanges;
-    protected int maxFuzzyKeys;
-
-    //non-GT
-    protected CubeSegment cubeSegment;
-    protected CubeDesc cubeDesc;
-    protected Cuboid cuboid;
-
-    public CubeScanRangePlanner(CubeSegment cubeSegment, Cuboid cuboid, TupleFilter filter, Set<TblColRef> dimensions, Set<TblColRef> groupbyDims, //
-            Collection<FunctionDesc> metrics) {
-
-        this.maxScanRanges = KylinConfig.getInstanceFromEnv().getQueryStorageVisitScanRangeMax();
-        this.maxFuzzyKeys = KylinConfig.getInstanceFromEnv().getQueryScanFuzzyKeyMax();
-
-        this.cubeSegment = cubeSegment;
-        this.cubeDesc = cubeSegment.getCubeDesc();
-        this.cuboid = cuboid;
-
-        Set<TblColRef> filterDims = Sets.newHashSet();
-        TupleFilter.collectColumns(filter, filterDims);
-
-        this.gtInfo = CubeGridTable.newGTInfo(cubeSegment, cuboid.getId());
-        CuboidToGridTableMapping mapping = cuboid.getCuboidToGridTableMapping();
-
-        IGTComparator comp = gtInfo.getCodeSystem().getComparator();
-        //start key GTRecord compare to start key GTRecord
-        this.rangeStartComparator = RecordComparators.getRangeStartComparator(comp);
-        //stop key GTRecord compare to stop key GTRecord
-        this.rangeEndComparator = RecordComparators.getRangeEndComparator(comp);
-        //start key GTRecord compare to stop key GTRecord
-        this.rangeStartEndComparator = RecordComparators.getRangeStartEndComparator(comp);
-
-        //replace the constant values in filter to dictionary codes 
-        this.gtFilter = GTUtil.convertFilterColumnsAndConstants(filter, gtInfo, mapping.getCuboidDimensionsInGTOrder(), groupbyDims);
-
-        this.gtDimensions = mapping.makeGridTableColumns(dimensions);
-        this.gtAggrGroups = mapping.makeGridTableColumns(replaceDerivedColumns(groupbyDims, cubeSegment.getCubeDesc()));
-        this.gtAggrMetrics = mapping.makeGridTableColumns(metrics);
-        this.gtAggrFuncs = mapping.makeAggrFuncs(metrics);
-
-        if (cubeSegment.getModel().getPartitionDesc().isPartitioned()) {
-            int index = mapping.getIndexOf(cubeSegment.getModel().getPartitionDesc().getPartitionDateColumnRef());
-            if (index >= 0) {
-                SegmentGTStartAndEnd segmentGTStartAndEnd = new SegmentGTStartAndEnd(cubeSegment, gtInfo);
-                this.gtStartAndEnd = segmentGTStartAndEnd.getSegmentStartAndEnd(index);
-                this.isPartitionColUsingDatetimeEncoding = segmentGTStartAndEnd.isUsingDatetimeEncoding(index);
-                this.gtPartitionCol = gtInfo.colRef(index);
-            }
-        }
-
-    }
-
-    /**
-     * constrcut GTScanRangePlanner with incomplete information. only be used for UT  
-     * @param info
-     * @param gtStartAndEnd
-     * @param gtPartitionCol
-     * @param gtFilter
-     */
-    public CubeScanRangePlanner(GTInfo info, Pair<ByteArray, ByteArray> gtStartAndEnd, TblColRef gtPartitionCol, TupleFilter gtFilter) {
-
-        this.maxScanRanges = KylinConfig.getInstanceFromEnv().getQueryStorageVisitScanRangeMax();
-        this.maxFuzzyKeys = KylinConfig.getInstanceFromEnv().getQueryScanFuzzyKeyMax();
-
-        this.gtInfo = info;
-
-        IGTComparator comp = gtInfo.getCodeSystem().getComparator();
-        //start key GTRecord compare to start key GTRecord
-        this.rangeStartComparator = RecordComparators.getRangeStartComparator(comp);
-        //stop key GTRecord compare to stop key GTRecord
-        this.rangeEndComparator = RecordComparators.getRangeEndComparator(comp);
-        //start key GTRecord compare to stop key GTRecord
-        this.rangeStartEndComparator = RecordComparators.getRangeStartEndComparator(comp);
-
-        this.gtFilter = gtFilter;
-        this.gtStartAndEnd = gtStartAndEnd;
-        this.gtPartitionCol = gtPartitionCol;
-    }
-
-    public GTScanRequest planScanRequest() {
-        GTScanRequest scanRequest;
-        List<GTScanRange> scanRanges = this.planScanRanges();
-        if (scanRanges != null && scanRanges.size() != 0) {
-            scanRequest = new GTScanRequestBuilder().setInfo(gtInfo).setRanges(scanRanges).setDimensions(gtDimensions).setAggrGroupBy(gtAggrGroups).setAggrMetrics(gtAggrMetrics).setAggrMetricsFuncs(gtAggrFuncs).setFilterPushDown(gtFilter).createGTScanRequest();
-        } else {
-            scanRequest = null;
-        }
-        return scanRequest;
-    }
-
-    /**
-     * Overwrite this method to provide smarter storage visit plans
-     * @return
-     */
-    public List<GTScanRange> planScanRanges() {
-        TupleFilter flatFilter = flattenToOrAndFilter(gtFilter);
-
-        List<Collection<ColumnRange>> orAndDimRanges = translateToOrAndDimRanges(flatFilter);
-
-        List<GTScanRange> scanRanges = Lists.newArrayListWithCapacity(orAndDimRanges.size());
-        for (Collection<ColumnRange> andDimRanges : orAndDimRanges) {
-            GTScanRange scanRange = newScanRange(andDimRanges);
-            if (scanRange != null)
-                scanRanges.add(scanRange);
-        }
-
-        List<GTScanRange> mergedRanges = mergeOverlapRanges(scanRanges);
-        mergedRanges = mergeTooManyRanges(mergedRanges, maxScanRanges);
-
-        return mergedRanges;
-    }
-
-    private Set<TblColRef> replaceDerivedColumns(Set<TblColRef> input, CubeDesc cubeDesc) {
-        Set<TblColRef> ret = Sets.newHashSet();
-        for (TblColRef col : input) {
-            if (cubeDesc.hasHostColumn(col)) {
-                for (TblColRef host : cubeDesc.getHostInfo(col).columns) {
-                    ret.add(host);
-                }
-            } else {
-                ret.add(col);
-            }
-        }
-        return ret;
-    }
-
-    protected GTScanRange newScanRange(Collection<ColumnRange> andDimRanges) {
-        GTRecord pkStart = new GTRecord(gtInfo);
-        GTRecord pkEnd = new GTRecord(gtInfo);
-        Map<Integer, Set<ByteArray>> fuzzyValues = Maps.newHashMap();
-
-        List<GTRecord> fuzzyKeys;
-
-        for (ColumnRange range : andDimRanges) {
-            if (gtPartitionCol != null && range.column.equals(gtPartitionCol)) {
-                int beginCompare = rangeStartEndComparator.comparator.compare(range.begin, gtStartAndEnd.getSecond());
-                int endCompare = rangeStartEndComparator.comparator.compare(gtStartAndEnd.getFirst(), range.end);
-
-                if ((isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare < 0) || (!isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare <= 0)) {
-                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded when using dict encoding, so use <= when has equals in condition. 
-                } else {
-                    logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}", //
-                            gtPartitionCol, makeReadable(gtStartAndEnd.getFirst()), makeReadable(gtStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end));
-                    return null;
-                }
-            }
-
-            int col = range.column.getColumnDesc().getZeroBasedIndex();
-            if (!gtInfo.getPrimaryKey().get(col))
-                continue;
-
-            pkStart.set(col, range.begin);
-            pkEnd.set(col, range.end);
-
-            if (range.valueSet != null && !range.valueSet.isEmpty()) {
-                fuzzyValues.put(col, range.valueSet);
-            }
-        }
-
-        fuzzyKeys =
-
-                buildFuzzyKeys(fuzzyValues);
-        return new GTScanRange(pkStart, pkEnd, fuzzyKeys);
-    }
-
-    private List<GTRecord> buildFuzzyKeys(Map<Integer, Set<ByteArray>> fuzzyValueSet) {
-        ArrayList<GTRecord> result = Lists.newArrayList();
-
-        if (fuzzyValueSet.isEmpty())
-            return result;
-
-        // debug/profiling purpose
-        if (BackdoorToggles.getDisableFuzzyKey()) {
-            logger.info("The execution of this query will not use fuzzy key");
-            return result;
-        }
-
-        List<Map<Integer, ByteArray>> fuzzyValueCombinations = FuzzyValueCombination.calculate(fuzzyValueSet, maxFuzzyKeys);
-
-        for (Map<Integer, ByteArray> fuzzyValue : fuzzyValueCombinations) {
-
-            //            BitSet bitSet = new BitSet(gtInfo.getColumnCount());
-            //            for (Map.Entry<Integer, ByteArray> entry : fuzzyValue.entrySet()) {
-            //                bitSet.set(entry.getKey());
-            //            }
-            GTRecord fuzzy = new GTRecord(gtInfo);
-            for (Map.Entry<Integer, ByteArray> entry : fuzzyValue.entrySet()) {
-                fuzzy.set(entry.getKey(), entry.getValue());
-            }
-
-            result.add(fuzzy);
-        }
-        return result;
-    }
-
-    protected List<GTScanRange> mergeOverlapRanges(List<GTScanRange> ranges) {
-        if (ranges.size() <= 1) {
-            return ranges;
-        }
-
-        // sort ranges by start key
-        Collections.sort(ranges, new Comparator<GTScanRange>() {
-            @Override
-            public int compare(GTScanRange a, GTScanRange b) {
-                return rangeStartComparator.compare(a.pkStart, b.pkStart);
-            }
-        });
-
-        // merge the overlap range
-        List<GTScanRange> mergedRanges = new ArrayList<GTScanRange>();
-        int mergeBeginIndex = 0;
-        GTRecord mergeEnd = ranges.get(0).pkEnd;
-        for (int index = 1; index < ranges.size(); index++) {
-            GTScanRange range = ranges.get(index);
-
-            // if overlap, swallow it
-            if (rangeStartEndComparator.compare(range.pkStart, mergeEnd) <= 0) {
-                mergeEnd = rangeEndComparator.max(mergeEnd, range.pkEnd);
-                continue;
-            }
-
-            // not overlap, split here
-            GTScanRange mergedRange = mergeKeyRange(ranges.subList(mergeBeginIndex, index));
-            mergedRanges.add(mergedRange);
-
-            // start new split
-            mergeBeginIndex = index;
-            mergeEnd = range.pkEnd;
-        }
-
-        // don't miss the last range
-        GTScanRange mergedRange = mergeKeyRange(ranges.subList(mergeBeginIndex, ranges.size()));
-        mergedRanges.add(mergedRange);
-
-        return mergedRanges;
-    }
-
-    private GTScanRange mergeKeyRange(List<GTScanRange> ranges) {
-        GTScanRange first = ranges.get(0);
-        if (ranges.size() == 1)
-            return first;
-
-        GTRecord start = first.pkStart;
-        GTRecord end = first.pkEnd;
-        List<GTRecord> newFuzzyKeys = new ArrayList<GTRecord>();
-
-        boolean hasNonFuzzyRange = false;
-        for (GTScanRange range : ranges) {
-            hasNonFuzzyRange = hasNonFuzzyRange || range.fuzzyKeys.isEmpty();
-            newFuzzyKeys.addAll(range.fuzzyKeys);
-            end = rangeEndComparator.max(end, range.pkEnd);
-        }
-
-        // if any range is non-fuzzy, then all fuzzy keys must be cleared
-        // also too many fuzzy keys will slow down HBase scan
-        if (hasNonFuzzyRange || newFuzzyKeys.size() > maxFuzzyKeys) {
-            newFuzzyKeys.clear();
-        }
-
-        return new GTScanRange(start, end, newFuzzyKeys);
-    }
-
-    protected List<GTScanRange> mergeTooManyRanges(List<GTScanRange> ranges, int maxRanges) {
-        if (ranges.size() <= maxRanges) {
-            return ranges;
-        }
-
-        // TODO: check the distance between range and merge the large distance range
-        List<GTScanRange> result = new ArrayList<GTScanRange>(1);
-        GTScanRange mergedRange = mergeKeyRange(ranges);
-        result.add(mergedRange);
-        return result;
-    }
-
-    public int getMaxScanRanges() {
-        return maxScanRanges;
-    }
-
-    public void setMaxScanRanges(int maxScanRanges) {
-        this.maxScanRanges = maxScanRanges;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
index ccf4895..db38484 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
@@ -138,7 +138,10 @@ public class GTAggregateScanner implements IGTScanner {
         long count = 0;
         for (GTRecord r : inputScanner) {
 
-            count++;
+            //check deadline
+            if (count % GTScanRequest.terminateCheckInterval == 1 && System.currentTimeMillis() > deadline) {
+                throw new GTScanTimeoutException("Timeout in GTAggregateScanner with scanned count " + count);
+            }
 
             if (getNumOfSpills() == 0) {
                 //check limit
@@ -152,10 +155,7 @@ public class GTAggregateScanner implements IGTScanner {
                 aggrCache.aggregate(r, Integer.MAX_VALUE);
             }
 
-            //check deadline
-            if (count % 10000 == 1 && System.currentTimeMillis() > deadline) {
-                throw new GTScanTimeoutException("Timeout in GTAggregateScanner with scanned count " + count);
-            }
+            count++;
         }
         logger.info("GTAggregateScanner input rows: " + count);
         return aggrCache.iterator();
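
The relocated check above is a periodic-deadline pattern: the clock is consulted only once every terminateCheckInterval rows, so the check stays cheap even on large scans. A generic sketch of the same idea (the method, row type and exception handling are illustrative, not Kylin code):

    // Hedged sketch of the periodic deadline check; not Kylin code.
    static void scanWithDeadline(Iterable<Object[]> rows, long timeoutMillis, long checkInterval) {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        long count = 0;
        for (Object[] row : rows) {
            // consult the clock only every checkInterval rows (GTScanRequest uses 1000)
            if (count % checkInterval == 1 && System.currentTimeMillis() > deadline) {
                throw new RuntimeException("scan self-terminated after " + count + " rows");
            }
            // ... aggregate the row here ...
            count++;
        }
    }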

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
index 31a9599..f1f84af 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
@@ -132,12 +132,12 @@ public class GTFilterScanner implements IGTScanner {
     }
 
     // cache the last one input and result, can reuse because rowkey are ordered, and same input could come in small group
-    static class FilterResultCache {
+    public static class FilterResultCache {
         static final int CHECKPOINT = 10000;
         static final double HIT_RATE_THRESHOLD = 0.5;
-        static boolean ENABLED = true; // enable cache by default
+        public static boolean ENABLED = true; // enable cache by default
 
-        boolean enabled = ENABLED;
+        public boolean enabled = ENABLED;
         ImmutableBitSet colsInFilter;
         int count;
         int hit;

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanExceedThresholdException.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanExceedThresholdException.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanExceedThresholdException.java
index dd57e90..ba75962 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanExceedThresholdException.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanExceedThresholdException.java
@@ -18,7 +18,7 @@
 
 package org.apache.kylin.gridtable;
 
-public class GTScanExceedThresholdException extends RuntimeException {
+public class GTScanExceedThresholdException extends GTScanSelfTerminatedException {
 
     public GTScanExceedThresholdException(String message) {
         super(message);

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 4cfba1b..5d27028 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -42,6 +42,8 @@ import com.google.common.collect.Sets;
 public class GTScanRequest {
 
     private static final Logger logger = LoggerFactory.getLogger(GTScanRequest.class);
+    // there is no need to make terminateCheckInterval very large, because each check is cheap
+    public static final int terminateCheckInterval = 1000;
 
     private GTInfo info;
     private List<GTScanRange> ranges;
@@ -55,13 +57,16 @@ public class GTScanRequest {
     private ImmutableBitSet aggrGroupBy;
     private ImmutableBitSet aggrMetrics;
     private String[] aggrMetricsFuncs;//
-    
+
     // hint to storage behavior
     private boolean allowStorageAggregation;
     private double aggCacheMemThreshold;
     private int storageScanRowNumThreshold;
     private int storagePushDownLimit;
 
+    // runtime computed fields
+    private transient boolean doingStorageAggregation = false;
+
     GTScanRequest(GTInfo info, List<GTScanRange> ranges, ImmutableBitSet dimensions, ImmutableBitSet aggrGroupBy, //
             ImmutableBitSet aggrMetrics, String[] aggrMetricsFuncs, TupleFilter filterPushDown, boolean allowStorageAggregation, //
             double aggCacheMemThreshold, int storageScanRowNumThreshold, int storagePushDownLimit) {
@@ -169,6 +174,7 @@ public class GTScanRequest {
                 logger.info("pre aggregation is not beneficial, skip it");
             } else if (this.hasAggregation()) {
                 logger.info("pre aggregating results before returning");
+                this.doingStorageAggregation = true;
                 result = new GTAggregateScanner(result, this, deadline);
             } else {
                 logger.info("has no aggregation, skip it");
@@ -178,6 +184,10 @@ public class GTScanRequest {
 
     }
 
+    public boolean isDoingStorageAggregation() {
+        return doingStorageAggregation;
+    }
+
     //touch every byte of the cell so that the cost of scanning will be truly reflected
     private int lookAndForget(IGTScanner scanner) {
         byte meaninglessByte = 0;
@@ -215,8 +225,8 @@ public class GTScanRequest {
         return ranges;
     }
 
-    public void setGTScanRanges(List<GTScanRange> ranges) {
-        this.ranges = ranges;
+    public void clearScanRanges() {
+        this.ranges = Lists.newArrayList();
     }
 
     public ImmutableBitSet getSelectedColBlocks() {
@@ -251,10 +261,6 @@ public class GTScanRequest {
         return allowStorageAggregation;
     }
 
-    public void setAllowStorageAggregation(boolean allowStorageAggregation) {
-        this.allowStorageAggregation = allowStorageAggregation;
-    }
-
     public double getAggCacheMemThreshold() {
         if (aggCacheMemThreshold < 0)
             return 0;
@@ -262,28 +268,18 @@ public class GTScanRequest {
             return aggCacheMemThreshold;
     }
 
-    public void setAggCacheMemThreshold(double gb) {
-        this.aggCacheMemThreshold = gb;
+    public void disableAggCacheMemCheck() {
+        this.aggCacheMemThreshold = 0;
     }
 
     public int getStorageScanRowNumThreshold() {
         return storageScanRowNumThreshold;
     }
 
-    public void setStorageScanRowNumThreshold(int storageScanRowNumThreshold) {
-        logger.info("storageScanRowNumThreshold is set to " + storageScanRowNumThreshold);
-        this.storageScanRowNumThreshold = storageScanRowNumThreshold;
-    }
-
     public int getStoragePushDownLimit() {
         return this.storagePushDownLimit;
     }
 
-    public void setStoragePushDownLimit(int limit) {
-        logger.info("storagePushDownLimit is set to " + storagePushDownLimit);
-        this.storagePushDownLimit = limit;
-    }
-
     @Override
     public String toString() {
         return "GTScanRequest [range=" + ranges + ", columns=" + columns + ", filterPushDown=" + filterPushDown + ", aggrGroupBy=" + aggrGroupBy + ", aggrMetrics=" + aggrMetrics + ", aggrMetricsFuncs=" + Arrays.toString(aggrMetricsFuncs) + "]";

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanSelfTerminatedException.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanSelfTerminatedException.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanSelfTerminatedException.java
new file mode 100644
index 0000000..4775ac6
--- /dev/null
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanSelfTerminatedException.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.gridtable;
+
+public class GTScanSelfTerminatedException extends RuntimeException {
+
+    public GTScanSelfTerminatedException(String s) {
+        super(s);
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanTimeoutException.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanTimeoutException.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanTimeoutException.java
index e92dae3..17a8d02 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanTimeoutException.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanTimeoutException.java
@@ -18,7 +18,7 @@
 
 package org.apache.kylin.gridtable;
 
-public class GTScanTimeoutException extends RuntimeException {
+public class GTScanTimeoutException extends GTScanSelfTerminatedException {
 
     public GTScanTimeoutException(String message) {
         super(message);
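
With both GTScanTimeoutException and GTScanExceedThresholdException now extending GTScanSelfTerminatedException, callers on the storage side can handle any self-terminated scan in a single place; a hedged sketch (runScan(), scanRequest and the logging are illustrative):

    // Hedged sketch: one catch clause covers both timeout and threshold termination.
    try {
        runScan(scanRequest);                 // hypothetical scan invocation
    } catch (GTScanSelfTerminatedException e) {
        // GTScanTimeoutException and GTScanExceedThresholdException both land here
        logger.warn("scan self-terminated: " + e.getMessage());
    }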

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
index b5f6de7..38b8c90 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
@@ -84,8 +84,7 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
             }
         };
 
-        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(0, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).createGTScanRequest();
-        scanRequest.setAggCacheMemThreshold(0.5);
+        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(0, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).setAggCacheMemThreshold(0.5).createGTScanRequest();
 
         GTAggregateScanner scanner = new GTAggregateScanner(inputScanner, scanRequest, Long.MAX_VALUE);
 
@@ -127,8 +126,7 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
         };
 
         // all-in-mem testcase
-        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(1, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).createGTScanRequest();
-        scanRequest.setAggCacheMemThreshold(0.5);
+        GTScanRequest scanRequest = new GTScanRequestBuilder().setInfo(INFO).setRanges(null).setDimensions(new ImmutableBitSet(0, 3)).setAggrGroupBy(new ImmutableBitSet(1, 3)).setAggrMetrics(new ImmutableBitSet(3, 6)).setAggrMetricsFuncs(new String[] { "SUM", "SUM", "COUNT_DISTINCT" }).setFilterPushDown(null).setAggCacheMemThreshold(0.5).createGTScanRequest();
 
         GTAggregateScanner scanner = new GTAggregateScanner(inputScanner, scanRequest, Long.MAX_VALUE);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
deleted file mode 100644
index 7b6d3fa..0000000
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
+++ /dev/null
@@ -1,617 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one or more
- *  contributor license agreements. See the NOTICE file distributed with
- *  this work for additional information regarding copyright ownership.
- *  The ASF licenses this file to You under the Apache License, Version 2.0
- *  (the "License"); you may not use this file except in compliance with
- *  the License. You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.kylin.gridtable;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.List;
-
-import org.apache.kylin.common.util.ByteArray;
-import org.apache.kylin.common.util.BytesSerializer;
-import org.apache.kylin.common.util.Dictionary;
-import org.apache.kylin.common.util.ImmutableBitSet;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.cube.gridtable.CubeCodeSystem;
-import org.apache.kylin.cube.gridtable.CubeScanRangePlanner;
-import org.apache.kylin.dict.NumberDictionaryBuilder;
-import org.apache.kylin.dict.StringBytesConverter;
-import org.apache.kylin.dict.TrieDictionaryBuilder;
-import org.apache.kylin.dimension.DictionaryDimEnc;
-import org.apache.kylin.dimension.DimensionEncoding;
-import org.apache.kylin.gridtable.GTFilterScanner.FilterResultCache;
-import org.apache.kylin.gridtable.GTInfo.Builder;
-import org.apache.kylin.gridtable.memstore.GTSimpleMemStore;
-import org.apache.kylin.metadata.datatype.DataType;
-import org.apache.kylin.metadata.datatype.LongMutable;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.ExtractTupleFilter;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
-import org.apache.kylin.metadata.model.ColumnDesc;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-public class DictGridTableTest extends LocalFileMetadataTestCase {
-
-    private GridTable table;
-    private GTInfo info;
-    private CompareTupleFilter timeComp0;
-    private CompareTupleFilter timeComp1;
-    private CompareTupleFilter timeComp2;
-    private CompareTupleFilter timeComp3;
-    private CompareTupleFilter timeComp4;
-    private CompareTupleFilter timeComp5;
-    private CompareTupleFilter timeComp6;
-    private CompareTupleFilter ageComp1;
-    private CompareTupleFilter ageComp2;
-    private CompareTupleFilter ageComp3;
-    private CompareTupleFilter ageComp4;
-
-    @After
-    public void after() throws Exception {
-
-        this.cleanupTestMetadata();
-    }
-
-    @Before
-    public void setup() throws IOException {
-
-        this.createTestMetadata();
-
-        table = newTestTable();
-        info = table.getInfo();
-
-        timeComp0 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-14"));
-        timeComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
-        timeComp2 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-13"));
-        timeComp3 = compare(info.colRef(0), FilterOperatorEnum.LT, enc(info, 0, "2015-01-15"));
-        timeComp4 = compare(info.colRef(0), FilterOperatorEnum.EQ, enc(info, 0, "2015-01-15"));
-        timeComp5 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-15"));
-        timeComp6 = compare(info.colRef(0), FilterOperatorEnum.EQ, enc(info, 0, "2015-01-14"));
-        ageComp1 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "10"));
-        ageComp2 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "20"));
-        ageComp3 = compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "30"));
-        ageComp4 = compare(info.colRef(1), FilterOperatorEnum.NEQ, enc(info, 1, "30"));
-
-    }
-
-    @Test
-    public void verifySegmentSkipping() {
-
-        ByteArray segmentStart = enc(info, 0, "2015-01-14");
-        ByteArray segmentStartX = enc(info, 0, "2015-01-14 00:00:00");//when partition col is dict encoded, time format will be free
-        ByteArray segmentEnd = enc(info, 0, "2015-01-15");
-        assertEquals(segmentStart, segmentStartX);
-
-        {
-            LogicalTupleFilter filter = and(timeComp0, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());//scan range are [close,close]
-            assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
-            assertEquals(1, r.get(0).fuzzyKeys.size());
-            assertEquals("[[null, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
-        }
-        {
-            LogicalTupleFilter filter = and(timeComp2, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());
-        }
-        {
-            LogicalTupleFilter filter = and(timeComp4, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());
-        }
-        {
-            LogicalTupleFilter filter = and(timeComp5, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());
-        }
-        {
-            LogicalTupleFilter filter = or(and(timeComp2, ageComp1), and(timeComp1, ageComp1), and(timeComp6, ageComp1));
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());
-            assertEquals("[1421193600000, 10]-[null, 10]", r.get(0).toString());
-            assertEquals("[[null, 10, null, null, null], [1421193600000, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
-        }
-        {
-            LogicalTupleFilter filter = or(timeComp2, timeComp1, timeComp6);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());
-            assertEquals("[1421193600000, null]-[null, null]", r.get(0).toString());
-            assertEquals(0, r.get(0).fuzzyKeys.size());
-        }
-        {
-            //skip FALSE filter
-            LogicalTupleFilter filter = and(ageComp1, ConstantTupleFilter.FALSE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());
-        }
-        {
-            //TRUE or FALSE filter
-            LogicalTupleFilter filter = or(ConstantTupleFilter.TRUE, ConstantTupleFilter.FALSE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());
-            assertEquals("[null, null]-[null, null]", r.get(0).toString());
-        }
-        {
-            //TRUE or other filter
-            LogicalTupleFilter filter = or(ageComp1, ConstantTupleFilter.TRUE);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segmentStart, segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());
-            assertEquals("[null, null]-[null, null]", r.get(0).toString());
-        }
-    }
-
-    @Test
-    public void verifySegmentSkipping2() {
-        ByteArray segmentEnd = enc(info, 0, "2015-01-15");
-
-        {
-            LogicalTupleFilter filter = and(timeComp0, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());// scan ranges are [close, close]
-            assertEquals("[null, 10]-[1421193600000, 10]", r.get(0).toString());
-            assertEquals(1, r.get(0).fuzzyKeys.size());
-            assertEquals("[[null, 10, null, null, null]]", r.get(0).fuzzyKeys.toString());
-        }
-
-        {
-            LogicalTupleFilter filter = and(timeComp5, ageComp1);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(new ByteArray(), segmentEnd), info.colRef(0), filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());// scan ranges are [close, close]
-        }
-    }
-
-    @Test
-    public void verifyScanRangePlanner() {
-
-        // flatten or-and & hbase fuzzy value
-        {
-            LogicalTupleFilter filter = and(timeComp1, or(ageComp1, ageComp2));
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(1, r.size());
-            assertEquals("[1421193600000, 10]-[null, 20]", r.get(0).toString());
-            assertEquals("[[null, 10, null, null, null], [null, 20, null, null, null]]", r.get(0).fuzzyKeys.toString());
-        }
-
-        // pre-evaluate ever false
-        {
-            LogicalTupleFilter filter = and(timeComp1, timeComp2);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(0, r.size());
-        }
-
-        // pre-evaluate ever true
-        {
-            LogicalTupleFilter filter = or(timeComp1, ageComp4);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals("[[null, null]-[null, null]]", r.toString());
-        }
-
-        // merge overlap range
-        {
-            LogicalTupleFilter filter = or(timeComp1, timeComp3);
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals("[[null, null]-[null, null]]", r.toString());
-        }
-
-        // merge too many ranges
-        {
-            LogicalTupleFilter filter = or(and(timeComp4, ageComp1), and(timeComp4, ageComp2), and(timeComp4, ageComp3));
-            CubeScanRangePlanner planner = new CubeScanRangePlanner(info, null, null, filter);
-            List<GTScanRange> r = planner.planScanRanges();
-            assertEquals(3, r.size());
-            assertEquals("[1421280000000, 10]-[1421280000000, 10]", r.get(0).toString());
-            assertEquals("[1421280000000, 20]-[1421280000000, 20]", r.get(1).toString());
-            assertEquals("[1421280000000, 30]-[1421280000000, 30]", r.get(2).toString());
-            planner.setMaxScanRanges(2);
-            List<GTScanRange> r2 = planner.planScanRanges();
-            assertEquals("[[1421280000000, 10]-[1421280000000, 30]]", r2.toString());
-        }
-    }
-
-    @Test
-    public void verifyFirstRow() throws IOException {
-        doScanAndVerify(table, new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setFilterPushDown(null).createGTScanRequest(), "[1421193600000, 30, Yang, 10, 10.5]", //
-                "[1421193600000, 30, Luke, 10, 10.5]", //
-                "[1421280000000, 20, Dong, 10, 10.5]", //
-                "[1421280000000, 20, Jason, 10, 10.5]", //
-                "[1421280000000, 30, Xu, 10, 10.5]", //
-                "[1421366400000, 20, Mahone, 10, 10.5]", //
-                "[1421366400000, 20, Qianhao, 10, 10.5]", //
-                "[1421366400000, 30, George, 10, 10.5]", //
-                "[1421366400000, 30, Shaofeng, 10, 10.5]", //
-                "[1421452800000, 10, Kejia, 10, 10.5]");
-    }
-
-    //for testing GTScanRequest serialization and deserialization
-    public static GTScanRequest useDeserializedGTScanRequest(GTScanRequest origin) {
-        ByteBuffer buffer = ByteBuffer.allocate(BytesSerializer.SERIALIZE_BUFFER_SIZE);
-        GTScanRequest.serializer.serialize(origin, buffer);
-        buffer.flip();
-        GTScanRequest sGTScanRequest = GTScanRequest.serializer.deserialize(buffer);
-
-        Assert.assertArrayEquals(origin.getAggrMetricsFuncs(), sGTScanRequest.getAggrMetricsFuncs());
-        Assert.assertEquals(origin.getAggCacheMemThreshold(), sGTScanRequest.getAggCacheMemThreshold(), 0.01);
-        return sGTScanRequest;
-    }
-
-    @Test
-    public void verifyScanWithUnevaluatableFilter() throws IOException {
-        GTInfo info = table.getInfo();
-
-        CompareTupleFilter fComp = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
-        ExtractTupleFilter fUnevaluatable = unevaluatable(info.colRef(1));
-        LogicalTupleFilter fNotPlusUnevaluatable = not(unevaluatable(info.colRef(1)));
-        LogicalTupleFilter filter = and(fComp, fUnevaluatable, fNotPlusUnevaluatable);
-
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[]{"sum"}).setFilterPushDown(filter).createGTScanRequest();
-
-        // note the unEvaluatable column 1 in filter is added to group by
-        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], [null], [null]], aggrGroupBy={0, 1}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
-
-        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 20, null]", "[1421280000000, 30, null, 10, null]", "[1421366400000, 20, null, 20, null]", "[1421366400000, 30, null, 20, null]", "[1421452800000, 10, null, 10, null]");
-    }
-
-    @Test
-    public void verifyScanWithEvaluatableFilter() throws IOException {
-        GTInfo info = table.getInfo();
-
-        CompareTupleFilter fComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
-        CompareTupleFilter fComp2 = compare(info.colRef(1), FilterOperatorEnum.GT, enc(info, 1, "10"));
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0)).setAggrMetrics(setOf(3)).setAggrMetricsFuncs(new String[]{"sum"}).setFilterPushDown(filter).createGTScanRequest();
-        // note the evaluatable column 1 in filter is added to returned columns but not in group by
-        assertEquals("GTScanRequest [range=[[null, null]-[null, null]], columns={0, 1, 3}, filterPushDown=AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 GT [\\x00]], aggrGroupBy={0}, aggrMetrics={3}, aggrMetricsFuncs=[sum]]", req.toString());
-
-        doScanAndVerify(table, useDeserializedGTScanRequest(req), "[1421280000000, 20, null, 30, null]", "[1421366400000, 20, null, 40, null]");
-    }
-
-    @Test
-    public void testFilterScannerPerf() throws IOException {
-        GridTable table = newTestPerfTable();
-        GTInfo info = table.getInfo();
-
-        CompareTupleFilter fComp1 = compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14"));
-        CompareTupleFilter fComp2 = compare(info.colRef(1), FilterOperatorEnum.GT, enc(info, 1, "10"));
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        FilterResultCache.ENABLED = false;
-        testFilterScannerPerfInner(table, info, filter);
-        FilterResultCache.ENABLED = true;
-        testFilterScannerPerfInner(table, info, filter);
-        FilterResultCache.ENABLED = false;
-        testFilterScannerPerfInner(table, info, filter);
-        FilterResultCache.ENABLED = true;
-        testFilterScannerPerfInner(table, info, filter);
-    }
-
-    @SuppressWarnings("unused")
-    private void testFilterScannerPerfInner(GridTable table, GTInfo info, LogicalTupleFilter filter) throws IOException {
-        long start = System.currentTimeMillis();
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(info).setRanges(null).setDimensions(null).setFilterPushDown(filter).createGTScanRequest();
-        IGTScanner scanner = table.scan(req);
-        int i = 0;
-        for (GTRecord r : scanner) {
-            i++;
-        }
-        scanner.close();
-        long end = System.currentTimeMillis();
-        System.out.println((end - start) + "ms with filter cache enabled=" + FilterResultCache.ENABLED + ", " + i + " rows");
-    }
-
-    @Test
-    public void verifyConvertFilterConstants1() {
-        GTInfo info = table.getInfo();
-
-        TableDesc extTable = TableDesc.mockup("ext");
-        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
-        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
-
-        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
-        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.EQ, "10");
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        List<TblColRef> colMapping = Lists.newArrayList();
-        colMapping.add(extColA);
-        colMapping.add(extColB);
-
-        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 EQ [\\x00]]", newFilter.toString());
-    }
-
-    @Test
-    public void verifyConvertFilterConstants2() {
-        GTInfo info = table.getInfo();
-
-        TableDesc extTable = TableDesc.mockup("ext");
-        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
-        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
-
-        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
-        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.LT, "9");
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        List<TblColRef> colMapping = Lists.newArrayList();
-        colMapping.add(extColA);
-        colMapping.add(extColB);
-
-        // $1<"9" round up to $1<"10"
-        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 LT [\\x00]]", newFilter.toString());
-    }
-
-    @Test
-    public void verifyConvertFilterConstants3() {
-        GTInfo info = table.getInfo();
-
-        TableDesc extTable = TableDesc.mockup("ext");
-        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
-        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
-
-        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
-        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.LTE, "9");
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        List<TblColRef> colMapping = Lists.newArrayList();
-        colMapping.add(extColA);
-        colMapping.add(extColB);
-
-        // $1<="9" round down to FALSE
-        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], []]", newFilter.toString());
-    }
-
-    @Test
-    public void verifyConvertFilterConstants4() {
-        GTInfo info = table.getInfo();
-
-        TableDesc extTable = TableDesc.mockup("ext");
-        TblColRef extColA = ColumnDesc.mockup(extTable, 1, "A", "timestamp").getRef();
-        TblColRef extColB = ColumnDesc.mockup(extTable, 2, "B", "integer").getRef();
-
-        CompareTupleFilter fComp1 = compare(extColA, FilterOperatorEnum.GT, "2015-01-14");
-        CompareTupleFilter fComp2 = compare(extColB, FilterOperatorEnum.IN, "9", "10", "15");
-        LogicalTupleFilter filter = and(fComp1, fComp2);
-
-        List<TblColRef> colMapping = Lists.newArrayList();
-        colMapping.add(extColA);
-        colMapping.add(extColB);
-
-        // $1 in ("9", "10", "15") has only "10" left
-        TupleFilter newFilter = GTUtil.convertFilterColumnsAndConstants(filter, info, colMapping, null);
-        assertEquals("AND [NULL.GT_MOCKUP_TABLE.0 GT [\\x00\\x00\\x01J\\xE5\\xBD\\x5C\\x00], NULL.GT_MOCKUP_TABLE.1 IN [\\x00]]", newFilter.toString());
-    }
-
-    private void doScanAndVerify(GridTable table, GTScanRequest req, String... verifyRows) throws IOException {
-        System.out.println(req);
-        IGTScanner scanner = table.scan(req);
-        int i = 0;
-        for (GTRecord r : scanner) {
-            System.out.println(r);
-            if (verifyRows == null || i >= verifyRows.length) {
-                Assert.fail();
-            }
-            assertEquals(verifyRows[i], r.toString());
-            i++;
-        }
-        scanner.close();
-    }
-
-    public static ByteArray enc(GTInfo info, int col, String value) {
-        ByteBuffer buf = ByteBuffer.allocate(info.getMaxColumnLength());
-        info.codeSystem.encodeColumnValue(col, value, buf);
-        return ByteArray.copyOf(buf.array(), buf.arrayOffset(), buf.position());
-    }
-
-    public static ExtractTupleFilter unevaluatable(TblColRef col) {
-        ExtractTupleFilter r = new ExtractTupleFilter(FilterOperatorEnum.EXTRACT);
-        r.addChild(new ColumnTupleFilter(col));
-        return r;
-    }
-
-    public static CompareTupleFilter compare(TblColRef col, FilterOperatorEnum op, Object... value) {
-        CompareTupleFilter result = new CompareTupleFilter(op);
-        result.addChild(new ColumnTupleFilter(col));
-        result.addChild(new ConstantTupleFilter(Arrays.asList(value)));
-        return result;
-    }
-
-    public static LogicalTupleFilter and(TupleFilter... children) {
-        return logic(FilterOperatorEnum.AND, children);
-    }
-
-    public static LogicalTupleFilter or(TupleFilter... children) {
-        return logic(FilterOperatorEnum.OR, children);
-    }
-
-    public static LogicalTupleFilter not(TupleFilter child) {
-        return logic(FilterOperatorEnum.NOT, child);
-    }
-
-    public static LogicalTupleFilter logic(FilterOperatorEnum op, TupleFilter... children) {
-        LogicalTupleFilter result = new LogicalTupleFilter(op);
-        for (TupleFilter c : children) {
-            result.addChild(c);
-        }
-        return result;
-    }
-
-    public static GridTable newTestTable() throws IOException {
-        GTInfo info = newInfo();
-        GTSimpleMemStore store = new GTSimpleMemStore(info);
-        GridTable table = new GridTable(info, store);
-
-        GTRecord r = new GTRecord(table.getInfo());
-        GTBuilder builder = table.rebuild();
-
-        builder.write(r.setValues("2015-01-14", "30", "Yang", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-14", "30", "Luke", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-15", "20", "Dong", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-15", "20", "Jason", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-15", "30", "Xu", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-16", "20", "Mahone", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-16", "20", "Qianhao", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-16", "30", "George", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-16", "30", "Shaofeng", new LongMutable(10), new BigDecimal("10.5")));
-        builder.write(r.setValues("2015-01-17", "10", "Kejia", new LongMutable(10), new BigDecimal("10.5")));
-        builder.close();
-
-        return table;
-    }
-
-    static GridTable newTestPerfTable() throws IOException {
-        GTInfo info = newInfo();
-        GTSimpleMemStore store = new GTSimpleMemStore(info);
-        GridTable table = new GridTable(info, store);
-
-        GTRecord r = new GTRecord(table.getInfo());
-        GTBuilder builder = table.rebuild();
-
-        for (int i = 0; i < 100000; i++) {
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-14", "30", "Yang", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-14", "30", "Luke", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-15", "20", "Dong", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-15", "20", "Jason", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-15", "30", "Xu", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-16", "20", "Mahone", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-16", "20", "Qianhao", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-16", "30", "George", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-16", "30", "Shaofeng", new LongMutable(10), new BigDecimal("10.5")));
-
-            for (int j = 0; j < 10; j++)
-                builder.write(r.setValues("2015-01-17", "10", "Kejia", new LongMutable(10), new BigDecimal("10.5")));
-        }
-        builder.close();
-
-        return table;
-    }
-
-    static GTInfo newInfo() {
-        Builder builder = GTInfo.builder();
-        builder.setCodeSystem(newDictCodeSystem());
-        builder.setColumns( //
-                DataType.getType("timestamp"), //
-                DataType.getType("integer"), //
-                DataType.getType("varchar(10)"), //
-                DataType.getType("bigint"), //
-                DataType.getType("decimal") //
-        );
-        builder.setPrimaryKey(setOf(0, 1));
-        builder.setColumnPreferIndex(setOf(0));
-        builder.enableColumnBlock(new ImmutableBitSet[] { setOf(0, 1), setOf(2), setOf(3, 4) });
-        builder.enableRowBlock(4);
-        GTInfo info = builder.build();
-        return info;
-    }
-
-    @SuppressWarnings("unchecked")
-    private static CubeCodeSystem newDictCodeSystem() {
-        DimensionEncoding[] dimEncs = new DimensionEncoding[3];
-        dimEncs[1] = new DictionaryDimEnc(newDictionaryOfInteger());
-        dimEncs[2] = new DictionaryDimEnc(newDictionaryOfString());
-        return new CubeCodeSystem(dimEncs);
-    }
-
-    @SuppressWarnings("rawtypes")
-    private static Dictionary newDictionaryOfString() {
-        TrieDictionaryBuilder<String> builder = new TrieDictionaryBuilder<>(new StringBytesConverter());
-        builder.addValue("Dong");
-        builder.addValue("George");
-        builder.addValue("Jason");
-        builder.addValue("Kejia");
-        builder.addValue("Luke");
-        builder.addValue("Mahone");
-        builder.addValue("Qianhao");
-        builder.addValue("Shaofeng");
-        builder.addValue("Xu");
-        builder.addValue("Yang");
-        return builder.build(0);
-    }
-
-    @SuppressWarnings("rawtypes")
-    private static Dictionary newDictionaryOfInteger() {
-        NumberDictionaryBuilder<String> builder = new NumberDictionaryBuilder<>(new StringBytesConverter());
-        builder.addValue("10");
-        builder.addValue("20");
-        builder.addValue("30");
-        builder.addValue("40");
-        builder.addValue("50");
-        builder.addValue("60");
-        builder.addValue("70");
-        builder.addValue("80");
-        builder.addValue("90");
-        builder.addValue("100");
-        return builder.build(0);
-    }
-
-    public static ImmutableBitSet setOf(int... values) {
-        BitSet set = new BitSet();
-        for (int i : values)
-            set.set(i);
-        return new ImmutableBitSet(set);
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
new file mode 100644
index 0000000..9f505f3
--- /dev/null
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeScanRangePlanner.java
@@ -0,0 +1,357 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.storage.gtrecord;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.debug.BackdoorToggles;
+import org.apache.kylin.common.util.ByteArray;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.common.FuzzyValueCombination;
+import org.apache.kylin.cube.cuboid.Cuboid;
+import org.apache.kylin.cube.gridtable.CubeGridTable;
+import org.apache.kylin.cube.gridtable.CuboidToGridTableMapping;
+import org.apache.kylin.cube.gridtable.RecordComparators;
+import org.apache.kylin.cube.gridtable.ScanRangePlannerBase;
+import org.apache.kylin.cube.gridtable.SegmentGTStartAndEnd;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.gridtable.GTInfo;
+import org.apache.kylin.gridtable.GTRecord;
+import org.apache.kylin.gridtable.GTScanRange;
+import org.apache.kylin.gridtable.GTScanRequest;
+import org.apache.kylin.gridtable.GTScanRequestBuilder;
+import org.apache.kylin.gridtable.GTUtil;
+import org.apache.kylin.gridtable.IGTComparator;
+import org.apache.kylin.metadata.filter.TupleFilter;
+import org.apache.kylin.metadata.model.FunctionDesc;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.storage.StorageContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+public class CubeScanRangePlanner extends ScanRangePlannerBase {
+
+    private static final Logger logger = LoggerFactory.getLogger(CubeScanRangePlanner.class);
+
+    protected int maxScanRanges;
+    protected int maxFuzzyKeys;
+
+    //non-GT
+    protected CubeSegment cubeSegment;
+    protected CubeDesc cubeDesc;
+    protected Cuboid cuboid;
+
+    protected StorageContext context;
+
+    public CubeScanRangePlanner(CubeSegment cubeSegment, Cuboid cuboid, TupleFilter filter, Set<TblColRef> dimensions, Set<TblColRef> groupbyDims, //
+            Collection<FunctionDesc> metrics, StorageContext context) {
+        this.context = context;
+
+        this.maxScanRanges = KylinConfig.getInstanceFromEnv().getQueryStorageVisitScanRangeMax();
+        this.maxFuzzyKeys = KylinConfig.getInstanceFromEnv().getQueryScanFuzzyKeyMax();
+
+        this.cubeSegment = cubeSegment;
+        this.cubeDesc = cubeSegment.getCubeDesc();
+        this.cuboid = cuboid;
+
+        Set<TblColRef> filterDims = Sets.newHashSet();
+        TupleFilter.collectColumns(filter, filterDims);
+
+        this.gtInfo = CubeGridTable.newGTInfo(cubeSegment, cuboid.getId());
+        CuboidToGridTableMapping mapping = cuboid.getCuboidToGridTableMapping();
+
+        IGTComparator comp = gtInfo.getCodeSystem().getComparator();
+        //start key GTRecord compared to start key GTRecord
+        this.rangeStartComparator = RecordComparators.getRangeStartComparator(comp);
+        //stop key GTRecord compared to stop key GTRecord
+        this.rangeEndComparator = RecordComparators.getRangeEndComparator(comp);
+        //start key GTRecord compared to stop key GTRecord
+        this.rangeStartEndComparator = RecordComparators.getRangeStartEndComparator(comp);
+
+        //replace the constant values in the filter with dictionary codes
+        this.gtFilter = GTUtil.convertFilterColumnsAndConstants(filter, gtInfo, mapping.getCuboidDimensionsInGTOrder(), groupbyDims);
+
+        this.gtDimensions = mapping.makeGridTableColumns(dimensions);
+        this.gtAggrGroups = mapping.makeGridTableColumns(replaceDerivedColumns(groupbyDims, cubeSegment.getCubeDesc()));
+        this.gtAggrMetrics = mapping.makeGridTableColumns(metrics);
+        this.gtAggrFuncs = mapping.makeAggrFuncs(metrics);
+
+        if (cubeSegment.getModel().getPartitionDesc().isPartitioned()) {
+            int index = mapping.getIndexOf(cubeSegment.getModel().getPartitionDesc().getPartitionDateColumnRef());
+            if (index >= 0) {
+                SegmentGTStartAndEnd segmentGTStartAndEnd = new SegmentGTStartAndEnd(cubeSegment, gtInfo);
+                this.gtStartAndEnd = segmentGTStartAndEnd.getSegmentStartAndEnd(index);
+                this.isPartitionColUsingDatetimeEncoding = segmentGTStartAndEnd.isUsingDatetimeEncoding(index);
+                this.gtPartitionCol = gtInfo.colRef(index);
+            }
+        }
+
+    }
+
+    /**
+     * Construct a CubeScanRangePlanner with incomplete information; only to be used in unit tests.
+     * @param info
+     * @param gtStartAndEnd
+     * @param gtPartitionCol
+     * @param gtFilter
+     */
+    public CubeScanRangePlanner(GTInfo info, Pair<ByteArray, ByteArray> gtStartAndEnd, TblColRef gtPartitionCol, TupleFilter gtFilter) {
+
+        this.maxScanRanges = KylinConfig.getInstanceFromEnv().getQueryStorageVisitScanRangeMax();
+        this.maxFuzzyKeys = KylinConfig.getInstanceFromEnv().getQueryScanFuzzyKeyMax();
+
+        this.gtInfo = info;
+
+        IGTComparator comp = gtInfo.getCodeSystem().getComparator();
+        //start key GTRecord compared to start key GTRecord
+        this.rangeStartComparator = RecordComparators.getRangeStartComparator(comp);
+        //stop key GTRecord compared to stop key GTRecord
+        this.rangeEndComparator = RecordComparators.getRangeEndComparator(comp);
+        //start key GTRecord compared to stop key GTRecord
+        this.rangeStartEndComparator = RecordComparators.getRangeStartEndComparator(comp);
+
+        this.gtFilter = gtFilter;
+        this.gtStartAndEnd = gtStartAndEnd;
+        this.gtPartitionCol = gtPartitionCol;
+    }
+
+    public GTScanRequest planScanRequest() {
+        GTScanRequest scanRequest;
+        List<GTScanRange> scanRanges = this.planScanRanges();
+        if (scanRanges != null && scanRanges.size() != 0) {
+            GTScanRequestBuilder builder = new GTScanRequestBuilder().setInfo(gtInfo).setRanges(scanRanges).setDimensions(gtDimensions).//
+                    setAggrGroupBy(gtAggrGroups).setAggrMetrics(gtAggrMetrics).setAggrMetricsFuncs(gtAggrFuncs).setFilterPushDown(gtFilter).//
+                    setAllowStorageAggregation(context.isNeedStorageAggregation()).setAggCacheMemThreshold(cubeSegment.getCubeInstance().getConfig().getQueryCoprocessorMemGB()).//
+                    setStorageScanRowNumThreshold(context.getThreshold());
+
+            if (cubeDesc.supportsLimitPushDown()) {
+                builder.setStoragePushDownLimit(context.getStoragePushDownLimit());
+            }
+            scanRequest = builder.createGTScanRequest();
+        } else {
+            scanRequest = null;
+        }
+        return scanRequest;
+    }
+
+    /**
+     * Override this method to provide smarter storage visit plans.
+     * @return
+     */
+    public List<GTScanRange> planScanRanges() {
+        TupleFilter flatFilter = flattenToOrAndFilter(gtFilter);
+
+        List<Collection<ColumnRange>> orAndDimRanges = translateToOrAndDimRanges(flatFilter);
+
+        List<GTScanRange> scanRanges = Lists.newArrayListWithCapacity(orAndDimRanges.size());
+        for (Collection<ColumnRange> andDimRanges : orAndDimRanges) {
+            GTScanRange scanRange = newScanRange(andDimRanges);
+            if (scanRange != null)
+                scanRanges.add(scanRange);
+        }
+
+        List<GTScanRange> mergedRanges = mergeOverlapRanges(scanRanges);
+        mergedRanges = mergeTooManyRanges(mergedRanges, maxScanRanges);
+
+        return mergedRanges;
+    }
+
+    private Set<TblColRef> replaceDerivedColumns(Set<TblColRef> input, CubeDesc cubeDesc) {
+        Set<TblColRef> ret = Sets.newHashSet();
+        for (TblColRef col : input) {
+            if (cubeDesc.hasHostColumn(col)) {
+                for (TblColRef host : cubeDesc.getHostInfo(col).columns) {
+                    ret.add(host);
+                }
+            } else {
+                ret.add(col);
+            }
+        }
+        return ret;
+    }
+
+    protected GTScanRange newScanRange(Collection<ColumnRange> andDimRanges) {
+        GTRecord pkStart = new GTRecord(gtInfo);
+        GTRecord pkEnd = new GTRecord(gtInfo);
+        Map<Integer, Set<ByteArray>> fuzzyValues = Maps.newHashMap();
+
+        List<GTRecord> fuzzyKeys;
+
+        for (ColumnRange range : andDimRanges) {
+            if (gtPartitionCol != null && range.column.equals(gtPartitionCol)) {
+                int beginCompare = rangeStartEndComparator.comparator.compare(range.begin, gtStartAndEnd.getSecond());
+                int endCompare = rangeStartEndComparator.comparator.compare(gtStartAndEnd.getFirst(), range.end);
+
+                if ((isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare < 0) || (!isPartitionColUsingDatetimeEncoding && endCompare <= 0 && beginCompare <= 0)) {
+                    //segment range is [Closed, Open), but segmentStartAndEnd.getSecond() might be rounded when using dict encoding, so use <= when the condition includes equality
+                } else {
+                    logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}", //
+                            gtPartitionCol, makeReadable(gtStartAndEnd.getFirst()), makeReadable(gtStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end));
+                    return null;
+                }
+            }
+
+            int col = range.column.getColumnDesc().getZeroBasedIndex();
+            if (!gtInfo.getPrimaryKey().get(col))
+                continue;
+
+            pkStart.set(col, range.begin);
+            pkEnd.set(col, range.end);
+
+            if (range.valueSet != null && !range.valueSet.isEmpty()) {
+                fuzzyValues.put(col, range.valueSet);
+            }
+        }
+
+        fuzzyKeys = buildFuzzyKeys(fuzzyValues);
+        return new GTScanRange(pkStart, pkEnd, fuzzyKeys);
+    }
+
+    private List<GTRecord> buildFuzzyKeys(Map<Integer, Set<ByteArray>> fuzzyValueSet) {
+        ArrayList<GTRecord> result = Lists.newArrayList();
+
+        if (fuzzyValueSet.isEmpty())
+            return result;
+
+        // debug/profiling purpose
+        if (BackdoorToggles.getDisableFuzzyKey()) {
+            logger.info("The execution of this query will not use fuzzy key");
+            return result;
+        }
+
+        List<Map<Integer, ByteArray>> fuzzyValueCombinations = FuzzyValueCombination.calculate(fuzzyValueSet, maxFuzzyKeys);
+
+        for (Map<Integer, ByteArray> fuzzyValue : fuzzyValueCombinations) {
+
+            //            BitSet bitSet = new BitSet(gtInfo.getColumnCount());
+            //            for (Map.Entry<Integer, ByteArray> entry : fuzzyValue.entrySet()) {
+            //                bitSet.set(entry.getKey());
+            //            }
+            GTRecord fuzzy = new GTRecord(gtInfo);
+            for (Map.Entry<Integer, ByteArray> entry : fuzzyValue.entrySet()) {
+                fuzzy.set(entry.getKey(), entry.getValue());
+            }
+
+            result.add(fuzzy);
+        }
+        return result;
+    }
+
+    protected List<GTScanRange> mergeOverlapRanges(List<GTScanRange> ranges) {
+        if (ranges.size() <= 1) {
+            return ranges;
+        }
+
+        // sort ranges by start key
+        Collections.sort(ranges, new Comparator<GTScanRange>() {
+            @Override
+            public int compare(GTScanRange a, GTScanRange b) {
+                return rangeStartComparator.compare(a.pkStart, b.pkStart);
+            }
+        });
+
+        // merge the overlap range
+        List<GTScanRange> mergedRanges = new ArrayList<GTScanRange>();
+        int mergeBeginIndex = 0;
+        GTRecord mergeEnd = ranges.get(0).pkEnd;
+        for (int index = 1; index < ranges.size(); index++) {
+            GTScanRange range = ranges.get(index);
+
+            // if overlap, swallow it
+            if (rangeStartEndComparator.compare(range.pkStart, mergeEnd) <= 0) {
+                mergeEnd = rangeEndComparator.max(mergeEnd, range.pkEnd);
+                continue;
+            }
+
+            // not overlap, split here
+            GTScanRange mergedRange = mergeKeyRange(ranges.subList(mergeBeginIndex, index));
+            mergedRanges.add(mergedRange);
+
+            // start new split
+            mergeBeginIndex = index;
+            mergeEnd = range.pkEnd;
+        }
+
+        // don't miss the last range
+        GTScanRange mergedRange = mergeKeyRange(ranges.subList(mergeBeginIndex, ranges.size()));
+        mergedRanges.add(mergedRange);
+
+        return mergedRanges;
+    }
+
+    private GTScanRange mergeKeyRange(List<GTScanRange> ranges) {
+        GTScanRange first = ranges.get(0);
+        if (ranges.size() == 1)
+            return first;
+
+        GTRecord start = first.pkStart;
+        GTRecord end = first.pkEnd;
+        List<GTRecord> newFuzzyKeys = new ArrayList<GTRecord>();
+
+        boolean hasNonFuzzyRange = false;
+        for (GTScanRange range : ranges) {
+            hasNonFuzzyRange = hasNonFuzzyRange || range.fuzzyKeys.isEmpty();
+            newFuzzyKeys.addAll(range.fuzzyKeys);
+            end = rangeEndComparator.max(end, range.pkEnd);
+        }
+
+        // if any range is non-fuzzy, then all fuzzy keys must be cleared
+        // also too many fuzzy keys will slow down HBase scan
+        if (hasNonFuzzyRange || newFuzzyKeys.size() > maxFuzzyKeys) {
+            newFuzzyKeys.clear();
+        }
+
+        return new GTScanRange(start, end, newFuzzyKeys);
+    }
+
+    protected List<GTScanRange> mergeTooManyRanges(List<GTScanRange> ranges, int maxRanges) {
+        if (ranges.size() <= maxRanges) {
+            return ranges;
+        }
+
+        // TODO: check the distance between range and merge the large distance range
+        List<GTScanRange> result = new ArrayList<GTScanRange>(1);
+        GTScanRange mergedRange = mergeKeyRange(ranges);
+        result.add(mergedRange);
+        return result;
+    }
+
+    public int getMaxScanRanges() {
+        return maxScanRanges;
+    }
+
+    public void setMaxScanRanges(int maxScanRanges) {
+        this.maxScanRanges = maxScanRanges;
+    }
+
+}
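
For readers skimming the new planner: the UT-only constructor added above is exercised exactly like the removed GridTable test earlier in this message -- build a GTInfo, encode the segment boundary and filter constants, then ask the planner for merged scan ranges. The fragment below is an illustrative sketch only and is not part of this commit; newInfo(), enc(), compare() and and() are the static helpers from that test, and a KylinConfig must be resolvable because the constructor reads the max-scan-range and max-fuzzy-key settings from it.

    // illustrative sketch, reusing the test helpers shown earlier in this message
    GTInfo info = newInfo();
    ByteArray segStart = enc(info, 0, "2015-01-14");
    ByteArray segEnd = enc(info, 0, "2015-01-15");
    TupleFilter filter = and( //
            compare(info.colRef(0), FilterOperatorEnum.GT, enc(info, 0, "2015-01-14")), //
            compare(info.colRef(1), FilterOperatorEnum.EQ, enc(info, 1, "10")));

    // partition column is column 0, segment range is [2015-01-14, 2015-01-15)
    CubeScanRangePlanner planner = new CubeScanRangePlanner(info, Pair.newPair(segStart, segEnd), info.colRef(0), filter);
    List<GTScanRange> ranges = planner.planScanRanges(); // already merged; at most maxScanRanges entries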

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
index 3b9d9c6..f32831a 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
@@ -23,10 +23,8 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
-import org.apache.kylin.cube.gridtable.CubeScanRangePlanner;
 import org.apache.kylin.dict.BuiltInFunctionTransformer;
 import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
@@ -67,23 +65,13 @@ public class CubeSegmentScanner implements IGTScanner {
         ITupleFilterTransformer translator = new BuiltInFunctionTransformer(cubeSeg.getDimensionEncodingMap());
         filter = translator.transform(filter);
 
-        String plannerName = KylinConfig.getInstanceFromEnv().getQueryStorageVisitPlanner();
         CubeScanRangePlanner scanRangePlanner;
         try {
-            scanRangePlanner = (CubeScanRangePlanner) Class.forName(plannerName).getConstructor(CubeSegment.class, Cuboid.class, TupleFilter.class, Set.class, Set.class, Collection.class).newInstance(cubeSeg, cuboid, filter, dimensions, groups, metrics);
+            scanRangePlanner = new CubeScanRangePlanner(cubeSeg, cuboid, filter, dimensions, groups, metrics, context);
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
         scanRequest = scanRangePlanner.planScanRequest();
-        if (scanRequest != null) {
-            scanRequest.setAllowStorageAggregation(context.isNeedStorageAggregation());
-            scanRequest.setAggCacheMemThreshold(cubeSeg.getCubeInstance().getConfig().getQueryCoprocessorMemGB());
-            scanRequest.setStorageScanRowNumThreshold(context.getThreshold());//TODO: divide by shard number?
-
-            if (cubeSeg.getCubeDesc().supportsLimitPushDown()) {
-                scanRequest.setStoragePushDownLimit(context.getStoragePushDownLimit());
-            }
-        }
         scanner = new ScannerWorker(cubeSeg, cuboid, scanRequest, gtStorage);
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/e38557b4/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
index 86346f8..f0c2494 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
@@ -20,6 +20,7 @@ package org.apache.kylin.storage.gtrecord;
 
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
@@ -72,10 +73,10 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
 
     @Override
     public ITupleIterator search(StorageContext context, SQLDigest sqlDigest, TupleInfo returnTupleInfo) {
-        
+
         //cope with queries with no aggregations
         RawQueryLastHacker.hackNoAggregations(sqlDigest, cubeDesc);
-        
+
         // Customized measure taking effect: e.g. allow custom measures to help raw queries
         notifyBeforeStorageQuery(sqlDigest);
 
@@ -112,9 +113,9 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         // replace derived columns in filter with host columns; columns on loosened condition must be added to group by
         TupleFilter filterD = translateDerived(filter, groupsD);
 
-        context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD, exactAggregation));
-        enableStoragePushDownLimit(cuboid, groups, derivedPostAggregation, groupsD, filter, sqlDigest.aggregations, context);
-        setThreshold(dimensionsD, metrics, context); // set cautious threshold to prevent out of memory
+        context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));
+        enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, sqlDigest.aggregations, context);
+        setThresholdIfNecessary(dimensionsD, metrics, context); // set cautious threshold to prevent out of memory
 
         List<CubeSegmentScanner> scanners = Lists.newArrayList();
         for (CubeSegment cubeSeg : cubeInstance.getSegments(SegmentStatusEnum.READY)) {
@@ -229,9 +230,22 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         return resultD;
     }
 
-    public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> groupD, Collection<TblColRef> singleValueD, boolean isExactAggregation) {
-        logger.info("Set isNeedStorageAggregation to " + !isExactAggregation);
-        return !isExactAggregation;
+    public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> groupD, Collection<TblColRef> singleValueD) {
+
+        logger.info("GroupD :" + groupD);
+        logger.info("SingleValueD :" + singleValueD);
+        logger.info("Cuboid columns :" + cuboid.getColumns());
+
+        HashSet<TblColRef> temp = Sets.newHashSet();
+        temp.addAll(groupD);
+        temp.addAll(singleValueD);
+        if (cuboid.getColumns().size() == temp.size()) {
+            logger.info("Does not need storage aggregation");
+            return false;
+        } else {
+            logger.info("Need storage aggregation");
+            return true;
+        }
     }
 
     //exact aggregation was introduced back when we had some measures (like holistic distinct count) that are sensitive
@@ -268,7 +282,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
 
         if (exact) {
-            logger.info("exactAggregation is true");
+            logger.info("exactAggregation is true, cuboid id is " + cuboid.getId());
         }
         return exact;
     }
@@ -355,7 +369,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
     }
 
-    private void setThreshold(Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics, StorageContext context) {
+    private void setThresholdIfNecessary(Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics, StorageContext context) {
         boolean hasMemHungryMeasure = false;
         for (FunctionDesc func : metrics) {
             hasMemHungryMeasure |= func.getMeasureType().isMemoryHungry();
@@ -381,7 +395,7 @@ public abstract class GTCubeStorageQueryBase implements IStorageQuery {
         }
     }
 
-    private void enableStoragePushDownLimit(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Collection<FunctionDesc> functionDescs, StorageContext context) {
+    private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Collection<FunctionDesc> functionDescs, StorageContext context) {
         boolean possible = true;
 
         boolean goodFilter = filter == null || (TupleFilter.isEvaluableRecursively(filter) && context.isCoprocessorEnabled());
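
In plain terms, the reworked isNeedStorageAggregation() above no longer keys off the exact-aggregation flag: storage aggregation can be skipped only when the grouped dimensions plus the single-value dimensions together cover every column of the chosen cuboid. A condensed, illustrative equivalent of that check follows (not the committed code; groupD, singleValueD and cuboid are the same arguments the method receives in the diff above):

    // covered = groupD union singleValueD; both are subsets of the cuboid columns here
    Set<TblColRef> covered = Sets.newHashSet();
    covered.addAll(groupD);
    covered.addAll(singleValueD);
    boolean needStorageAggregation = covered.size() != cuboid.getColumns().size();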


[02/50] [abbrv] kylin git commit: minor, better logging messages

Posted by sh...@apache.org.
minor, better logging messages


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d6801695
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d6801695
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d6801695

Branch: refs/heads/KYLIN-1726
Commit: d6801695240f5191716aec4cbd336470281fb20f
Parents: 01d033f
Author: Li Yang <li...@apache.org>
Authored: Thu Sep 8 17:02:17 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Thu Sep 8 17:02:28 2016 +0800

----------------------------------------------------------------------
 build/bin/setenv.sh                                              | 2 +-
 .../java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java    | 2 +-
 .../java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java   | 2 +-
 .../java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java  | 4 ++--
 .../org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java    | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d6801695/build/bin/setenv.sh
----------------------------------------------------------------------
diff --git a/build/bin/setenv.sh b/build/bin/setenv.sh
index d486672..317005b 100755
--- a/build/bin/setenv.sh
+++ b/build/bin/setenv.sh
@@ -20,7 +20,7 @@
 # (if you're deploying KYLIN on a powerful server and want to replace the default conservative settings)
 # uncomment the following for it to take effect
 export KYLIN_JVM_SETTINGS="-Xms1024M -Xmx4096M -Xss256K -XX:MaxPermSize=128M -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$KYLIN_HOME/logs/kylin.gc.$$ -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=64M"
-# export KYLIN_JVM_SETTINGS="-Xms16g -Xmx16g -XX:MaxPermSize=512m -XX:NewSize=3g -XX:MaxNewSize=3g -XX:SurvivorRatio=4 -XX:+CMSClassUnloadingEnabled -XX:+CMSParallelRemarkEnabled -XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:CMSInitiatingOccupancyFraction=70 -XX:+DisableExplicitGC"
+# export KYLIN_JVM_SETTINGS="-Xms16g -Xmx16g -XX:MaxPermSize=512m -XX:NewSize=3g -XX:MaxNewSize=3g -XX:SurvivorRatio=4 -XX:+CMSClassUnloadingEnabled -XX:+CMSParallelRemarkEnabled -XX:+UseConcMarkSweepGC -XX:+CMSIncrementalMode -XX:CMSInitiatingOccupancyFraction=70 -XX:+DisableExplicitGC -XX:+HeapDumpOnOutOfMemoryError"
 
 # uncomment the following for it to take effect (the values need adjusting to fit your env)
 # export KYLIN_DEBUG_SETTINGS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"

http://git-wip-us.apache.org/repos/asf/kylin/blob/d6801695/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
index 33b6f29..0b4ae40 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder.java
@@ -56,7 +56,7 @@ public class BatchMergeJobBuilder extends JobBuilderSupport {
         final String cuboidRootPath = getCuboidRootPath(jobId);
 
         final List<CubeSegment> mergingSegments = cubeSegment.getCubeInstance().getMergingSegments(cubeSegment);
-        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge");
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be at least 2 segments to merge, target segment " + cubeSegment);
         final List<String> mergingSegmentIds = Lists.newArrayList();
         final List<String> mergingCuboidPaths = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/d6801695/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
index 289cd48..129d525 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/BatchMergeJobBuilder2.java
@@ -48,7 +48,7 @@ public class BatchMergeJobBuilder2 extends JobBuilderSupport {
         final String jobId = result.getId();
 
         final List<CubeSegment> mergingSegments = cubeSegment.getCubeInstance().getMergingSegments(cubeSegment);
-        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge");
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be at least 2 segments to merge, target segment " + cubeSegment);
         final List<String> mergingSegmentIds = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingSegmentIds.add(merging.getUuid());

http://git-wip-us.apache.org/repos/asf/kylin/blob/d6801695/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index 1bd052d..7c2b3fd 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -169,7 +169,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
 
     public List<String> getMergingHTables() {
         final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization()).getMergingSegments((CubeSegment) seg);
-        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge");
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be at least 2 segments to merge, target segment " + seg);
         final List<String> mergingHTables = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingHTables.add(merging.getStorageLocationIdentifier());
@@ -179,7 +179,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
 
     public List<String> getMergingHDFSPaths() {
         final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization()).getMergingSegments((CubeSegment) seg);
-        Preconditions.checkState(mergingSegments.size() > 1, "there should be more than 2 segments to merge");
+        Preconditions.checkState(mergingSegments.size() > 1, "there should be at least 2 segments to merge, target segment " + seg);
         final List<String> mergingHDFSPaths = Lists.newArrayList();
         for (CubeSegment merging : mergingSegments) {
             mergingHDFSPaths.add(getJobWorkingDir(merging.getLastBuildJobID()));

http://git-wip-us.apache.org/repos/asf/kylin/blob/d6801695/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index 729635b..bdd3981 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -73,7 +73,7 @@ public class ZookeeperJobLock implements JobLock {
             logger.warn("error acquire lock", e);
         }
         if (!hasLock) {
-            logger.warn("fail to acquire lock, scheduler has not been started");
+            logger.warn("fail to acquire lock, scheduler has not been started; maybe another kylin process is still running?");
             zkClient.close();
             return false;
         }


[44/50] [abbrv] kylin git commit: rename the streaming_table.json

Posted by sh...@apache.org.
rename the streaming_table.json

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ffdc5d21
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ffdc5d21
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ffdc5d21

Branch: refs/heads/KYLIN-1726
Commit: ffdc5d21d5f46daaf25a0dfed9b3afeef78fc62c
Parents: 1108d9e
Author: shaofengshi <sh...@apache.org>
Authored: Mon Sep 12 14:27:41 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../kafka/DEFAULT.STREAMING_TABLE.json          | 21 --------------------
 .../streaming/DEFAULT.STREAMING_TABLE.json      |  6 ------
 2 files changed, 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ffdc5d21/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json b/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
deleted file mode 100644
index 6a64cce..0000000
--- a/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- 
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "DEFAULT.STREAMING_TABLE",
-  "topic": "test_streaming_table_topic_xyz",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ffdc5d21/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json b/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
deleted file mode 100644
index 85a477b..0000000
--- a/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "DEFAULT.STREAMING_TABLE",
-  "type": "kafka",
-  "last_modified": 0
-}


[32/50] [abbrv] kylin git commit: minor, in Tuple, auto-convert measure values to short and byte

Posted by sh...@apache.org.
minor, in Tuple, auto-convert measure values to short and byte


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5dc5ac85
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5dc5ac85
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5dc5ac85

Branch: refs/heads/KYLIN-1726
Commit: 5dc5ac85fc2ac0671ff2e60c6538ea5448b67c83
Parents: 0362c2b
Author: Li Yang <li...@apache.org>
Authored: Tue Sep 13 15:16:47 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Sep 13 15:16:47 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/metadata/tuple/Tuple.java | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5dc5ac85/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
index 54e5786..aaf9aa9 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
@@ -117,6 +117,10 @@ public class Tuple implements ITuple {
             fieldValue = ((BigDecimal) fieldValue).doubleValue();
         } else if ("integer".equals(dataType) && fieldValue instanceof Number) {
             fieldValue = ((Number) fieldValue).intValue();
+        } else if ("smallint".equals(dataType) && fieldValue instanceof Number) {
+            fieldValue = ((Number) fieldValue).shortValue();
+        } else if ("tinyint".equals(dataType)) {
+            fieldValue = ((Number) fieldValue).byteValue();
         } else if ("float".equals(dataType) && fieldValue instanceof BigDecimal) {
             fieldValue = ((BigDecimal) fieldValue).floatValue();
         } else if ("date".equals(dataType) && fieldValue instanceof Long) {
@@ -185,11 +189,11 @@ public class Tuple implements ITuple {
             return Long.valueOf(DateFormat.stringToMillis(strValue));
         } else if ("tinyint".equals(dataTypeName)) {
             return Byte.valueOf(strValue);
-        } else if ("short".equals(dataTypeName) || "smallint".equals(dataTypeName)) {
+        } else if ("smallint".equals(dataTypeName)) {
             return Short.valueOf(strValue);
         } else if ("integer".equals(dataTypeName)) {
             return Integer.valueOf(strValue);
-        } else if ("long".equals(dataTypeName) || "bigint".equals(dataTypeName)) {
+        } else if ("bigint".equals(dataTypeName)) {
             return Long.valueOf(strValue);
         } else if ("double".equals(dataTypeName)) {
             return Double.valueOf(strValue);

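For reference, a minimal standalone sketch of the string-to-value mapping after this change; the class below is hypothetical (it is not the real Tuple method) and uses only the type names visible in the hunk above:

    // Hypothetical illustration: after this change only the canonical type names
    // ("smallint", "bigint", ...) are mapped; the "short"/"long" aliases are gone.
    public class TypeConvertSketch {
        static Object convert(String dataTypeName, String strValue) {
            if ("tinyint".equals(dataTypeName)) {
                return Byte.valueOf(strValue);
            } else if ("smallint".equals(dataTypeName)) {      // "short" no longer accepted here
                return Short.valueOf(strValue);
            } else if ("integer".equals(dataTypeName)) {
                return Integer.valueOf(strValue);
            } else if ("bigint".equals(dataTypeName)) {        // "long" no longer accepted here
                return Long.valueOf(strValue);
            } else if ("double".equals(dataTypeName)) {
                return Double.valueOf(strValue);
            }
            return strValue;                                   // fall back to the raw string
        }

        public static void main(String[] args) {
            System.out.println(convert("smallint", "42").getClass().getSimpleName()); // Short
        }
    }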

[46/50] [abbrv] kylin git commit: KYLIN-1726 use segment uuid instead of name

Posted by sh...@apache.org.
KYLIN-1726 use segment uuid instead of name


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/42dafc15
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/42dafc15
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/42dafc15

Branch: refs/heads/KYLIN-1726
Commit: 42dafc15db40731582d6257c618eff29643930a8
Parents: 81c7323
Author: shaofengshi <sh...@apache.org>
Authored: Tue Aug 30 20:41:42 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java |  5 +++-
 .../kylin/provision/BuildCubeWithStream.java    | 26 +++++++++++++++++---
 .../apache/kylin/source/kafka/KafkaMRInput.java |  2 +-
 .../source/kafka/hadoop/KafkaFlatTableJob.java  | 11 +++------
 .../kafka/hadoop/KafkaInputRecordReader.java    |  9 ++++---
 5 files changed, 36 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/42dafc15/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index daeca0d..fc68798 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -444,8 +444,11 @@ public class CubeManager implements IRealizationProvider {
         updateCube(cubeBuilder);
         return newSegment;
     }
-
     public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset) throws IOException {
+        return refreshSegment(cube, startDate, endDate, startOffset, endOffset, true);
+    }
+
+    public CubeSegment refreshSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, boolean strictChecking) throws IOException {
         checkNoBuildingSegment(cube);
 
         CubeSegment newSegment = newSegment(cube, startDate, endDate, startOffset, endOffset);

http://git-wip-us.apache.org/repos/asf/kylin/blob/42dafc15/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index cfa9b45..2c09f48 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -50,6 +50,8 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.List;
 import java.util.TimeZone;
 import java.util.UUID;
 
@@ -146,18 +148,34 @@ public class BuildCubeWithStream {
         //merge
         mergeSegment(cubeName, 0, 15000);
 
+        List<CubeSegment> segments = cubeManager.getCube(cubeName).getSegments();
+        Assert.assertTrue(segments.size() == 1);
+
+        CubeSegment toRefreshSeg = segments.get(0);
+        HashMap<String, String> partitionOffsetMap = toRefreshSeg.getAdditionalInfo();
+
+        refreshSegment(cubeName, toRefreshSeg.getSourceOffsetStart(), toRefreshSeg.getSourceOffsetEnd(), partitionOffsetMap);
+        segments = cubeManager.getCube(cubeName).getSegments();
+        Assert.assertTrue(segments.size() == 1);
+
     }
 
     private String mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, true);
+        CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
         return job.getId();
     }
 
-    private String refreshSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.refreshSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
+    private String refreshSegment(String cubeName, long startOffset, long endOffset, HashMap<String, String> partitionOffsetMap) throws Exception {
+        CubeSegment segment = cubeManager.refreshSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
+        segment.setAdditionalInfo(partitionOffsetMap);
+        CubeInstance cubeInstance = cubeManager.getCube(cubeName);
+        CubeUpdate cubeBuilder = new CubeUpdate(cubeInstance);
+        cubeBuilder.setToUpdateSegs(segment);
+        cubeManager.updateCube(cubeBuilder);
+        segment = cubeManager.getCube(cubeName).getSegmentById(segment.getUuid());
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
@@ -165,7 +183,7 @@ public class BuildCubeWithStream {
     }
 
     private String buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset);
+        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());

http://git-wip-us.apache.org/repos/asf/kylin/blob/42dafc15/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
index cfce137..a5f678f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -165,7 +165,7 @@ public class KafkaMRInput implements IMRInput {
             jobBuilderSupport.appendMapReduceParameters(cmd);
             JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_CUBE_NAME, seg.getRealization().getName());
             JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, outputPath);
-            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_NAME, seg.getName());
+            JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_SEGMENT_ID, seg.getUuid());
             JobBuilderSupport.appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_Save_Kafka_Data_" + seg.getRealization().getName() + "_Step");
 
             result.setMapReduceParams(cmd.toString());

http://git-wip-us.apache.org/repos/asf/kylin/blob/42dafc15/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
index decfb60..87d2471 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
@@ -33,7 +33,6 @@ import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
@@ -70,14 +69,14 @@ public class KafkaFlatTableJob extends AbstractHadoopJob {
             options.addOption(OPTION_JOB_NAME);
             options.addOption(OPTION_CUBE_NAME);
             options.addOption(OPTION_OUTPUT_PATH);
-            options.addOption(OPTION_SEGMENT_NAME);
+            options.addOption(OPTION_SEGMENT_ID);
             parseOptions(options, args);
 
             job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
             String cubeName = getOptionValue(OPTION_CUBE_NAME);
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
 
-            String segmentName = getOptionValue(OPTION_SEGMENT_NAME);
+            String segmentId = getOptionValue(OPTION_SEGMENT_ID);
 
             // ----------------------------------------------------------------------------
             // add metadata to distributed cache
@@ -85,7 +84,7 @@ public class KafkaFlatTableJob extends AbstractHadoopJob {
             CubeInstance cube = cubeMgr.getCube(cubeName);
 
             job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
-            job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_NAME, segmentName);
+            job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_ID, segmentId);
             logger.info("Starting: " + job.getJobName());
 
             setJobClasspath(job, cube.getConfig());
@@ -104,11 +103,9 @@ public class KafkaFlatTableJob extends AbstractHadoopJob {
             job.getConfiguration().set(CONFIG_KAFKA_TIMEOUT, String.valueOf(kafkaConfig.getTimeout()));
             job.getConfiguration().set(CONFIG_KAFKA_BUFFER_SIZE, String.valueOf(kafkaConfig.getBufferSize()));
             job.getConfiguration().set(CONFIG_KAFKA_INPUT_FORMAT, "json");
-            job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
-            job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_NAME, segmentName);
             job.getConfiguration().set(CONFIG_KAFKA_PARSER_NAME, kafkaConfig.getParserName());
             job.getConfiguration().set(CONFIG_KAFKA_CONSUMER_GROUP, cubeName); // use cubeName as consumer group name
-            setupMapper(cube.getSegment(segmentName, SegmentStatusEnum.NEW));
+            setupMapper(cube.getSegmentById(segmentId));
             job.setNumReduceTasks(0);
             FileOutputFormat.setOutputPath(job, output);
             FileOutputFormat.setCompressOutput(job, true);

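A small sketch of the pattern this job switches to: carry the immutable segment uuid through the MR job configuration instead of the segment name, then resolve the segment again on the task side (e.g. cube.getSegmentById(id)). The configuration key below is illustrative only, standing in for BatchConstants.CFG_CUBE_SEGMENT_ID:

    import java.util.UUID;
    import org.apache.hadoop.conf.Configuration;

    public class SegmentIdPassThroughSketch {
        static final String CFG_SEGMENT_ID = "sketch.cube.segment.id"; // illustrative key only

        // submit side: record which segment the job is for
        static void configure(Configuration conf, String segmentUuid) {
            conf.set(CFG_SEGMENT_ID, segmentUuid);
        }

        // task side: read the uuid back; a real job would then call cube.getSegmentById(id)
        static String resolve(Configuration conf) {
            return conf.get(CFG_SEGMENT_ID);
        }

        public static void main(String[] args) {
            Configuration conf = new Configuration();
            configure(conf, UUID.randomUUID().toString());
            System.out.println(resolve(conf));
        }
    }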
http://git-wip-us.apache.org/repos/asf/kylin/blob/42dafc15/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
index f67fef5..6774c9d 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputRecordReader.java
@@ -105,6 +105,11 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
             value = new BytesWritable();
         }
 
+        if (watermark >= latestOffset) {
+            log.info("Reach the end offset, stop reading.");
+            return false;
+        }
+
         if (messages == null) {
             log.info("{} fetching offset {} ", topic + ":" + split.getBrokers() + ":" + partition, watermark);
             TopicPartition topicPartition = new TopicPartition(topic, partition);
@@ -119,10 +124,6 @@ public class KafkaInputRecordReader extends RecordReader<LongWritable, BytesWrit
 
         if (iterator.hasNext()) {
             ConsumerRecord<String, String> message = iterator.next();
-            if (message.offset() >= latestOffset) {
-                log.info("Reach the end offset, stop reading.");
-                return false;
-            }
             key.set(message.offset());
             byte[] valuebytes = Bytes.toBytes(message.value());
             value.set(valuebytes, 0, valuebytes.length);

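For context, a simplified sketch of the bounded read this hunk reorders, using a plain kafka-clients consumer (the 0.10-era poll(long) call) with made-up broker, topic and offsets; the point is that the end-offset check now happens before fetching another batch rather than after a record has already been pulled:

    import java.util.Collections;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.TopicPartition;

    public class BoundedReadSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "sandbox:6667");
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            long watermark = 0L;                                   // next offset to read
            long latestOffset = 15000L;                            // exclusive end offset
            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                TopicPartition tp = new TopicPartition("test_streaming_table_topic_xyz", 0);
                consumer.assign(Collections.singletonList(tp));
                consumer.seek(tp, watermark);
                while (watermark < latestOffset) {                 // stop BEFORE fetching again
                    ConsumerRecords<String, String> records = consumer.poll(1000L);
                    for (ConsumerRecord<String, String> record : records) {
                        if (record.offset() >= latestOffset) {
                            return;                                // safety net, mirrors the old check
                        }
                        watermark = record.offset() + 1;
                        // process record.value() ...
                    }
                }
            }
        }
    }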

[45/50] [abbrv] kylin git commit: change to upper case

Posted by sh...@apache.org.
change to upper case

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/aa308805
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/aa308805
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/aa308805

Branch: refs/heads/KYLIN-1726
Commit: aa30880578078369a5844e04a7d7ce736661e902
Parents: ffdc5d2
Author: shaofengshi <sh...@apache.org>
Authored: Mon Sep 12 14:28:50 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Sep 14 16:34:36 2016 +0800

----------------------------------------------------------------------
 .../kafka/DEFAULT.STREAMING_TABLE.json          | 21 ++++++++++++++++++++
 .../streaming/DEFAULT.STREAMING_TABLE.json      |  6 ++++++
 2 files changed, 27 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/aa308805/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json b/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
new file mode 100644
index 0000000..6a64cce
--- /dev/null
+++ b/examples/test_case_data/localmeta/kafka/DEFAULT.STREAMING_TABLE.json
@@ -0,0 +1,21 @@
+{
+ 
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "DEFAULT.STREAMING_TABLE",
+  "topic": "test_streaming_table_topic_xyz",
+  "timeout": 60000,
+  "bufferSize": 65536,
+  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
+  "last_modified": 0,
+  "clusters": [
+    {
+      "brokers": [
+        {
+          "id": 0,
+          "host": "sandbox",
+          "port": 6667
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/aa308805/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json b/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
new file mode 100644
index 0000000..85a477b
--- /dev/null
+++ b/examples/test_case_data/localmeta/streaming/DEFAULT.STREAMING_TABLE.json
@@ -0,0 +1,6 @@
+{
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "DEFAULT.STREAMING_TABLE",
+  "type": "kafka",
+  "last_modified": 0
+}


[10/50] [abbrv] kylin git commit: KYLIN-2005 Move all storage side behavior hints to GTScanRequest

Posted by sh...@apache.org.
KYLIN-2005 Move all storage side behavior hints to GTScanRequest


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a2c875d8
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a2c875d8
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a2c875d8

Branch: refs/heads/KYLIN-1726
Commit: a2c875d8a2d06f23dd6467bbcc459bff82918295
Parents: e38557b
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 16:46:22 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 17:47:29 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/gridtable/GTScanRequest.java   |  33 +-
 .../kylin/gridtable/GTScanRequestBuilder.java   |  30 +-
 .../kylin/gridtable/StorageSideBehavior.java    |  30 +
 .../apache/kylin/query/ITKylinQueryTest.java    |   4 +-
 .../common/coprocessor/CoprocessorBehavior.java |  30 -
 .../observer/AggregateRegionObserver.java       |  10 +-
 .../observer/AggregationScanner.java            |  16 +-
 .../coprocessor/observer/ObserverEnabler.java   |   6 +-
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |  88 +--
 .../hbase/cube/v2/ExpectedSizeIterator.java     |   4 +-
 .../coprocessor/endpoint/CubeVisitService.java  |  18 +-
 .../endpoint/generated/CubeVisitProtos.java     | 754 ++++---------------
 .../endpoint/protobuf/CubeVisit.proto           |  13 +-
 .../observer/AggregateRegionObserverTest.java   |   6 +-
 14 files changed, 332 insertions(+), 710 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 5d27028..3e57e86 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -59,6 +59,9 @@ public class GTScanRequest {
     private String[] aggrMetricsFuncs;//
 
     // hint to storage behavior
+    private String storageBehavior;
+    private long startTime;
+    private long timeout;
     private boolean allowStorageAggregation;
     private double aggCacheMemThreshold;
     private int storageScanRowNumThreshold;
@@ -69,7 +72,7 @@ public class GTScanRequest {
 
     GTScanRequest(GTInfo info, List<GTScanRange> ranges, ImmutableBitSet dimensions, ImmutableBitSet aggrGroupBy, //
             ImmutableBitSet aggrMetrics, String[] aggrMetricsFuncs, TupleFilter filterPushDown, boolean allowStorageAggregation, //
-            double aggCacheMemThreshold, int storageScanRowNumThreshold, int storagePushDownLimit) {
+            double aggCacheMemThreshold, int storageScanRowNumThreshold, int storagePushDownLimit, String storageBehavior, long startTime, long timeout) {
         this.info = info;
         if (ranges == null) {
             this.ranges = Lists.newArrayList(new GTScanRange(new GTRecord(info), new GTRecord(info)));
@@ -83,6 +86,9 @@ public class GTScanRequest {
         this.aggrMetrics = aggrMetrics;
         this.aggrMetricsFuncs = aggrMetricsFuncs;
 
+        this.storageBehavior = storageBehavior;
+        this.startTime = startTime;
+        this.timeout = timeout;
         this.allowStorageAggregation = allowStorageAggregation;
         this.aggCacheMemThreshold = aggCacheMemThreshold;
         this.storageScanRowNumThreshold = storageScanRowNumThreshold;
@@ -115,6 +121,10 @@ public class GTScanRequest {
         }
     }
 
+    public void setTimeout(long timeout) {
+        this.timeout = timeout;
+    }
+
     private void validateFilterPushDown(GTInfo info) {
         if (!hasFilterPushDown())
             return;
@@ -280,6 +290,18 @@ public class GTScanRequest {
         return this.storagePushDownLimit;
     }
 
+    public String getStorageBehavior() {
+        return storageBehavior;
+    }
+
+    public long getStartTime() {
+        return startTime;
+    }
+
+    public long getTimeout() {
+        return timeout;
+    }
+
     @Override
     public String toString() {
         return "GTScanRequest [range=" + ranges + ", columns=" + columns + ", filterPushDown=" + filterPushDown + ", aggrGroupBy=" + aggrGroupBy + ", aggrMetrics=" + aggrMetrics + ", aggrMetricsFuncs=" + Arrays.toString(aggrMetricsFuncs) + "]";
@@ -320,6 +342,9 @@ public class GTScanRequest {
             out.putDouble(value.aggCacheMemThreshold);
             BytesUtil.writeVInt(value.storageScanRowNumThreshold, out);
             BytesUtil.writeVInt(value.storagePushDownLimit, out);
+            BytesUtil.writeVLong(value.startTime, out);
+            BytesUtil.writeVLong(value.timeout, out);
+            BytesUtil.writeUTFString(value.storageBehavior, out);
         }
 
         @Override
@@ -350,11 +375,15 @@ public class GTScanRequest {
             double sAggrCacheGB = in.getDouble();
             int storageScanRowNumThreshold = BytesUtil.readVInt(in);
             int storagePushDownLimit = BytesUtil.readVInt(in);
+            long startTime = BytesUtil.readVLong(in);
+            long timeout = BytesUtil.readVLong(in);
+            String storageBehavior = BytesUtil.readUTFString(in);
 
             return new GTScanRequestBuilder().setInfo(sInfo).setRanges(sRanges).setDimensions(sColumns).//
             setAggrGroupBy(sAggGroupBy).setAggrMetrics(sAggrMetrics).setAggrMetricsFuncs(sAggrMetricFuncs).//
             setFilterPushDown(sGTFilter).setAllowStorageAggregation(sAllowPreAggr).setAggCacheMemThreshold(sAggrCacheGB).//
-            setStorageScanRowNumThreshold(storageScanRowNumThreshold).setStoragePushDownLimit(storagePushDownLimit).createGTScanRequest();
+            setStorageScanRowNumThreshold(storageScanRowNumThreshold).setStoragePushDownLimit(storagePushDownLimit).//
+            setStartTime(startTime).setTimeout(timeout).setStorageBehavior(storageBehavior).createGTScanRequest();
         }
 
         private void serializeGTRecord(GTRecord gtRecord, ByteBuffer out) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
index c4390cd..f542de1 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequestBuilder.java
@@ -21,6 +21,7 @@ package org.apache.kylin.gridtable;
 import java.util.BitSet;
 import java.util.List;
 
+import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.metadata.filter.TupleFilter;
 
@@ -36,6 +37,9 @@ public class GTScanRequestBuilder {
     private double aggCacheMemThreshold = 0;
     private int storageScanRowNumThreshold = Integer.MAX_VALUE;// storage should terminate itself when $storageScanRowNumThreshold cuboid rows are scanned, and throw exception.   
     private int storagePushDownLimit = Integer.MAX_VALUE;// storage can quit working when $toragePushDownLimit aggregated rows are produced. 
+    private long startTime = -1;
+    private long timeout = -1;
+    private String storageBehavior = null;
 
     public GTScanRequestBuilder setInfo(GTInfo info) {
         this.info = info;
@@ -92,6 +96,21 @@ public class GTScanRequestBuilder {
         return this;
     }
 
+    public GTScanRequestBuilder setStartTime(long startTime) {
+        this.startTime = startTime;
+        return this;
+    }
+
+    public GTScanRequestBuilder setTimeout(long timeout) {
+        this.timeout = timeout;
+        return this;
+    }
+
+    public GTScanRequestBuilder setStorageBehavior(String storageBehavior) {
+        this.storageBehavior = storageBehavior;
+        return this;
+    }
+
     public GTScanRequest createGTScanRequest() {
         if (aggrGroupBy == null) {
             aggrGroupBy = new ImmutableBitSet(new BitSet());
@@ -104,7 +123,14 @@ public class GTScanRequestBuilder {
         if (aggrMetricsFuncs == null) {
             aggrMetricsFuncs = new String[0];
         }
-        
-        return new GTScanRequest(info, ranges, dimensions, aggrGroupBy, aggrMetrics, aggrMetricsFuncs, filterPushDown, allowStorageAggregation, aggCacheMemThreshold, storageScanRowNumThreshold, storagePushDownLimit);
+
+        if (storageBehavior == null) {
+            storageBehavior = BackdoorToggles.getCoprocessorBehavior() == null ? StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString() : BackdoorToggles.getCoprocessorBehavior();
+        }
+
+        this.startTime = startTime == -1 ? System.currentTimeMillis() : startTime;
+        this.timeout = timeout == -1 ? 300000 : timeout;
+
+        return new GTScanRequest(info, ranges, dimensions, aggrGroupBy, aggrMetrics, aggrMetricsFuncs, filterPushDown, allowStorageAggregation, aggCacheMemThreshold, storageScanRowNumThreshold, storagePushDownLimit, storageBehavior, startTime, timeout);
     }
 }
\ No newline at end of file

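Illustrative use of the extended builder, a sketch only; it assumes a GTInfo, a dimension bitset and a filter are already available, and the timeout value is arbitrary:

    import org.apache.kylin.common.util.ImmutableBitSet;
    import org.apache.kylin.gridtable.GTInfo;
    import org.apache.kylin.gridtable.GTScanRequest;
    import org.apache.kylin.gridtable.GTScanRequestBuilder;
    import org.apache.kylin.gridtable.StorageSideBehavior;
    import org.apache.kylin.metadata.filter.TupleFilter;

    class ScanRequestSketch {
        // Sketch: the behavior/start-time/timeout hints now travel inside the scan request
        // itself instead of as separate fields of the coprocessor protobuf request.
        static GTScanRequest build(GTInfo info, ImmutableBitSet dims, TupleFilter filter) {
            return new GTScanRequestBuilder()
                    .setInfo(info)
                    .setRanges(null)                 // null -> full range, handled in GTScanRequest's constructor
                    .setDimensions(dims)
                    .setFilterPushDown(filter)
                    .setStorageBehavior(StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString())
                    .setStartTime(System.currentTimeMillis())
                    .setTimeout(300000)              // same default the builder applies when left at -1
                    .createGTScanRequest();
        }
    }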
http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
new file mode 100644
index 0000000..7fa93e7
--- /dev/null
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.gridtable;
+
+/**
+ */
+public enum StorageSideBehavior {
+    RAW_SCAN, // only use RegionScanner to scan raw data, for testing hbase scan speed
+    SCAN, // only scan data, used for profiling tuple scan speed. Will not return any result
+    SCAN_FILTER, // only scan + filter, used for profiling filter speed. Will not return any result
+    SCAN_FILTER_AGGR, // aggregate the result. Will return results
+    SCAN_FILTER_AGGR_CHECKMEM, // default full operations. Will return results
+    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY, // on each scan operation, delay for 10s to simulate slow queries, for test use
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index fc2fd52..0efea64 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -40,7 +40,7 @@ import org.apache.kylin.query.routing.Candidate;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
 import org.apache.kylin.query.schema.OLAPSchemaFactory;
 import org.apache.kylin.storage.hbase.HBaseStorage;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
@@ -140,7 +140,7 @@ public class ITKylinQueryTest extends KylinTestBase {
         });
 
         Map<String, String> toggles = Maps.newHashMap();
-        toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
+        toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
         BackdoorToggles.setToggles(toggles);
 
         KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.03");//set timeout to 9s

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
deleted file mode 100644
index 5f21351..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/common/coprocessor/CoprocessorBehavior.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.storage.hbase.common.coprocessor;
-
-/**
- */
-public enum CoprocessorBehavior {
-    RAW_SCAN, //on use RegionScanner to scan raw data, for testing hbase scan speed
-    SCAN, //only scan data, used for profiling tuple scan speed. Will not return any result
-    SCAN_FILTER, //only scan+filter used,used for profiling filter speed.  Will not return any result
-    SCAN_FILTER_AGGR, //aggregate the result.  Will return results
-    SCAN_FILTER_AGGR_CHECKMEM, //default full operations. Will return results
-    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY, // on each scan operation, delay for 10s to simulate slow queries, for test use
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
index c7b650a..7139ca7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
@@ -85,15 +85,15 @@ public class AggregateRegionObserver extends BaseRegionObserver {
         byte[] filterBytes = scan.getAttribute(FILTER);
         CoprocessorFilter filter = CoprocessorFilter.deserialize(filterBytes);
 
-        CoprocessorBehavior coprocessorBehavior = CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM;
+        StorageSideBehavior storageSideBehavior = StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM;
         try {
             byte[] behavior = scan.getAttribute(BEHAVIOR);
             if (behavior != null && behavior.length != 0) {
-                coprocessorBehavior = CoprocessorBehavior.valueOf(new String(behavior));
+                storageSideBehavior = StorageSideBehavior.valueOf(new String(behavior));
             }
         } catch (Exception e) {
             LOG.error("failed to parse behavior,using default behavior SCAN_FILTER_AGGR_CHECKMEM", e);
-            coprocessorBehavior = CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM;
+            storageSideBehavior = StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM;
         }
 
         // start/end region operation & sync on scanner is suggested by the
@@ -103,7 +103,7 @@ public class AggregateRegionObserver extends BaseRegionObserver {
         region.startRegionOperation();
         try {
             synchronized (innerScanner) {
-                return new AggregationScanner(type, filter, projector, aggregators, innerScanner, coprocessorBehavior);
+                return new AggregationScanner(type, filter, projector, aggregators, innerScanner, storageSideBehavior);
             }
         } finally {
             region.closeRegionOperation();

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
index be26142..a77f988 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.kylin.measure.MeasureAggregator;
 import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
@@ -39,9 +39,9 @@ import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
 public class AggregationScanner implements RegionScanner {
 
     private RegionScanner outerScanner;
-    private CoprocessorBehavior behavior;
+    private StorageSideBehavior behavior;
 
-    public AggregationScanner(CoprocessorRowType type, CoprocessorFilter filter, CoprocessorProjector groupBy, ObserverAggregators aggrs, RegionScanner innerScanner, CoprocessorBehavior behavior) throws IOException {
+    public AggregationScanner(CoprocessorRowType type, CoprocessorFilter filter, CoprocessorProjector groupBy, ObserverAggregators aggrs, RegionScanner innerScanner, StorageSideBehavior behavior) throws IOException {
 
         AggregateRegionObserver.LOG.info("Kylin Coprocessor start");
 
@@ -79,23 +79,23 @@ public class AggregationScanner implements RegionScanner {
             Cell cell = results.get(0);
             tuple.setUnderlying(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
 
-            if (behavior == CoprocessorBehavior.SCAN) {
+            if (behavior == StorageSideBehavior.SCAN) {
                 //touch every byte of the cell so that the cost of scanning will be trully reflected
                 int endIndex = cell.getRowOffset() + cell.getRowLength();
                 for (int i = cell.getRowOffset(); i < endIndex; ++i) {
                     meaninglessByte += cell.getRowArray()[i];
                 }
             } else {
-                if (behavior.ordinal() >= CoprocessorBehavior.SCAN_FILTER.ordinal()) {
+                if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER.ordinal()) {
                     if (filter != null && filter.evaluate(tuple) == false)
                         continue;
 
-                    if (behavior.ordinal() >= CoprocessorBehavior.SCAN_FILTER_AGGR.ordinal()) {
+                    if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR.ordinal()) {
                         AggrKey aggKey = projector.getAggrKey(results);
                         MeasureAggregator[] bufs = aggCache.getBuffer(aggKey);
                         aggregators.aggregate(bufs, results);
 
-                        if (behavior.ordinal() >= CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
+                        if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
                             aggCache.checkMemoryUsage();
                         }
                     }
@@ -103,7 +103,7 @@ public class AggregationScanner implements RegionScanner {
             }
         }
 
-        if (behavior == CoprocessorBehavior.SCAN) {
+        if (behavior == StorageSideBehavior.SCAN) {
             System.out.println("meaningless byte is now " + meaninglessByte);
         }
 

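A minimal sketch of the ordinal-based gating the scanner relies on: each StorageSideBehavior constant implies every stage declared before it, so one ordinal comparison per stage is enough.

    import org.apache.kylin.gridtable.StorageSideBehavior;

    class BehaviorGateSketch {
        static void process(StorageSideBehavior behavior) {
            if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER.ordinal()) {
                // filter rows
            }
            if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR.ordinal()) {
                // aggregate rows
            }
            if (behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
                // check aggregation cache memory
            }
        }

        public static void main(String[] args) {
            process(StorageSideBehavior.SCAN_FILTER_AGGR); // filter + aggregate, no mem check
        }
    }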
http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
index f0e9bed..394b3e2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
@@ -35,7 +35,7 @@ import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.metadata.filter.TupleFilter;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.storage.StorageContext;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
 import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorRowType;
@@ -75,14 +75,14 @@ public class ObserverEnabler {
 
         if (localCoprocessor) {
             RegionScanner innerScanner = new RegionScannerAdapter(table.getScanner(scan));
-            AggregationScanner aggrScanner = new AggregationScanner(type, filter, projector, aggrs, innerScanner, CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM);
+            AggregationScanner aggrScanner = new AggregationScanner(type, filter, projector, aggrs, innerScanner, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM);
             return new ResultScannerAdapter(aggrScanner);
         } else {
 
             // debug/profiling purpose
             String toggle = BackdoorToggles.getCoprocessorBehavior();
             if (toggle == null) {
-                toggle = CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString(); //default behavior
+                toggle = StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString(); //default behavior
             } else {
                 logger.info("The execution of this query will use " + toggle + " as observer's behavior");
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 5b48351..573951b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesSerializer;
 import org.apache.kylin.common.util.BytesUtil;
@@ -47,7 +46,6 @@ import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
 import org.apache.kylin.gridtable.IGTScanner;
 import org.apache.kylin.storage.hbase.HBaseConnection;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
@@ -104,10 +102,6 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
     @Override
     public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOException {
 
-        final String toggle = BackdoorToggles.getCoprocessorBehavior() == null ? CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.toString() : BackdoorToggles.getCoprocessorBehavior();
-
-        logger.info("New scanner for current segment {} will use {} as endpoint's behavior", cubeSeg, toggle);
-
         Pair<Short, Short> shardNumAndBaseShard = getShardNumAndBaseShard();
         short shardNum = shardNumAndBaseShard.getFirst();
         short cuboidBaseShard = shardNumAndBaseShard.getSecond();
@@ -130,39 +124,14 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         //TODO: raw scan can be constructed at region side to reduce traffic
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
-        int rawScanBufferSize = BytesSerializer.SERIALIZE_BUFFER_SIZE;
-        while (true) {
-            try {
-                ByteBuffer rawScanBuffer = ByteBuffer.allocate(rawScanBufferSize);
-                BytesUtil.writeVInt(rawScans.size(), rawScanBuffer);
-                for (RawScan rs : rawScans) {
-                    RawScan.serializer.serialize(rs, rawScanBuffer);
-                }
-                rawScanBuffer.flip();
-                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(), rawScanBuffer.limit());
-                break;
-            } catch (BufferOverflowException boe) {
-                logger.info("Buffer size {} cannot hold the raw scans, resizing to 4 times", rawScanBufferSize);
-                rawScanBufferSize *= 4;
-            }
-        }
+        rawScanByteString = serializeRawScans(rawScans);
+        
         scanRequest.clearScanRanges();//since raw scans are sent to coprocessor, we don't need to duplicate sending it
-
-        int scanRequestBufferSize = BytesSerializer.SERIALIZE_BUFFER_SIZE;
-        while (true) {
-            try {
-                ByteBuffer buffer = ByteBuffer.allocate(scanRequestBufferSize);
-                GTScanRequest.serializer.serialize(scanRequest, buffer);
-                buffer.flip();
-                scanRequestByteString = HBaseZeroCopyByteString.wrap(buffer.array(), buffer.position(), buffer.limit());
-                break;
-            } catch (BufferOverflowException boe) {
-                logger.info("Buffer size {} cannot hold the scan request, resizing to 4 times", scanRequestBufferSize);
-                scanRequestBufferSize *= 4;
-            }
-        }
-
-        logger.debug("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
+        final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum);
+        scanRequest.setTimeout(epResultItr.getRpcTimeout());
+        scanRequestByteString = serializeGTScanReq(scanRequest);
+        
+        logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
 
         logger.info("The scan {} for segment {} is as below with {} separate raw scans, shard part of start/end key is set to 0", Integer.toHexString(System.identityHashCode(scanRequest)), cubeSeg, rawScans.size());
         for (RawScan rs : rawScans) {
@@ -172,7 +141,6 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum, cuboidBaseShard, rawScans.size());
 
         final AtomicLong totalScannedCount = new AtomicLong(0);
-        final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum);
 
         // KylinConfig: use env instance instead of CubeSegment, because KylinConfig will share among queries
         // for different cubes until redeployment of coprocessor jar.
@@ -184,9 +152,6 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
             builder.addHbaseColumnsToGT(intList);
         }
         builder.setRowkeyPreambleSize(cubeSeg.getRowKeyPreambleSize());
-        builder.setBehavior(toggle);
-        builder.setStartTime(System.currentTimeMillis());
-        builder.setTimeout(epResultItr.getRpcTimeout());
         builder.setKylinProperties(kylinConfig.getConfigAsString());
 
         for (final Pair<byte[], byte[]> epRange : getEPKeyRanges(cuboidBaseShard, shardNum, totalShards)) {
@@ -260,6 +225,45 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         return new GTBlobScatter(fullGTInfo, epResultItr, scanRequest.getColumns(), totalScannedCount.get(), scanRequest.getStoragePushDownLimit());
     }
 
+    private ByteString serializeGTScanReq(GTScanRequest scanRequest) {
+        ByteString scanRequestByteString;
+        int scanRequestBufferSize = BytesSerializer.SERIALIZE_BUFFER_SIZE;
+        while (true) {
+            try {
+                ByteBuffer buffer = ByteBuffer.allocate(scanRequestBufferSize);
+                GTScanRequest.serializer.serialize(scanRequest, buffer);
+                buffer.flip();
+                scanRequestByteString = HBaseZeroCopyByteString.wrap(buffer.array(), buffer.position(), buffer.limit());
+                break;
+            } catch (BufferOverflowException boe) {
+                logger.info("Buffer size {} cannot hold the scan request, resizing to 4 times", scanRequestBufferSize);
+                scanRequestBufferSize *= 4;
+            }
+        }
+        return scanRequestByteString;
+    }
+
+    private ByteString serializeRawScans(List<RawScan> rawScans) {
+        ByteString rawScanByteString;
+        int rawScanBufferSize = BytesSerializer.SERIALIZE_BUFFER_SIZE;
+        while (true) {
+            try {
+                ByteBuffer rawScanBuffer = ByteBuffer.allocate(rawScanBufferSize);
+                BytesUtil.writeVInt(rawScans.size(), rawScanBuffer);
+                for (RawScan rs : rawScans) {
+                    RawScan.serializer.serialize(rs, rawScanBuffer);
+                }
+                rawScanBuffer.flip();
+                rawScanByteString = HBaseZeroCopyByteString.wrap(rawScanBuffer.array(), rawScanBuffer.position(), rawScanBuffer.limit());
+                break;
+            } catch (BufferOverflowException boe) {
+                logger.info("Buffer size {} cannot hold the raw scans, resizing to 4 times", rawScanBufferSize);
+                rawScanBufferSize *= 4;
+            }
+        }
+        return rawScanByteString;
+    }
+
     private String getStatsString(byte[] region, CubeVisitResponse result) {
         StringBuilder sb = new StringBuilder();
         Stats stats = result.getStats();

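The serializeGTScanReq/serializeRawScans helpers above share one technique: serialize optimistically and, on BufferOverflowException, retry with a buffer four times larger. A generic sketch follows; the Writer interface is hypothetical, standing in for the Kylin serializers:

    import java.nio.BufferOverflowException;
    import java.nio.ByteBuffer;

    class GrowingBufferSketch {
        interface Writer { void writeTo(ByteBuffer out); }     // hypothetical stand-in

        static ByteBuffer serialize(Writer writer, int initialSize) {
            int size = initialSize;
            while (true) {
                try {
                    ByteBuffer buffer = ByteBuffer.allocate(size);
                    writer.writeTo(buffer);
                    buffer.flip();                             // ready for wrapping/reading
                    return buffer;
                } catch (BufferOverflowException boe) {
                    size *= 4;                                 // same growth factor as the code above
                }
            }
        }

        public static void main(String[] args) {
            ByteBuffer b = serialize(new Writer() {
                @Override
                public void writeTo(ByteBuffer out) {
                    out.put("hello".getBytes());
                }
            }, 2);
            System.out.println(b.remaining());                 // 5
        }
    }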
http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index 442963f..f4729a3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -92,8 +92,8 @@ class ExpectedSizeIterator implements Iterator<byte[]> {
                 if (coprocException instanceof GTScanSelfTerminatedException)
                     throw (GTScanSelfTerminatedException) coprocException;
                 else
-                    throw new RuntimeException("Error in coprocessor",coprocException);
-                
+                    throw new RuntimeException("Error in coprocessor", coprocException);
+
             } else if (ret == null) {
                 throw new RuntimeException("Timeout visiting cube! Check why coprocessor exception is not sent back? In coprocessor Self-termination is checked every " + //
                         GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout(" + timeout + ") cannot support this many scans?");

http://git-wip-us.apache.org/repos/asf/kylin/blob/a2c875d8/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 064d100..36adca1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -54,11 +54,11 @@ import org.apache.kylin.gridtable.GTScanRequest;
 import org.apache.kylin.gridtable.GTScanTimeoutException;
 import org.apache.kylin.gridtable.IGTScanner;
 import org.apache.kylin.gridtable.IGTStore;
+import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.measure.BufferedMeasureEncoder;
 import org.apache.kylin.metadata.filter.UDF.MassInTupleFilter;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
 import org.apache.kylin.storage.hbase.cube.v2.CellListIterator;
 import org.apache.kylin.storage.hbase.cube.v2.CubeHBaseRPC;
 import org.apache.kylin.storage.hbase.cube.v2.HBaseReadonlyStore;
@@ -198,10 +198,10 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             for (IntList intList : request.getHbaseColumnsToGTList()) {
                 hbaseColumnsToGT.add(intList.getIntsList());
             }
-            CoprocessorBehavior behavior = CoprocessorBehavior.valueOf(request.getBehavior());
+            StorageSideBehavior behavior = StorageSideBehavior.valueOf(scanReq.getStorageBehavior());
             final List<RawScan> hbaseRawScans = deserializeRawScans(ByteBuffer.wrap(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getHbaseRawScan())));
 
-            appendProfileInfo(sb, "start latency: " + (this.serviceStartTime - request.getStartTime()));
+            appendProfileInfo(sb, "start latency: " + (this.serviceStartTime - scanReq.getStartTime()));
 
             MassInTupleFilter.VALUE_PROVIDER_FACTORY = new MassInValueProviderFactoryImpl(new MassInValueProviderFactoryImpl.DimEncAware() {
                 @Override
@@ -228,7 +228,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
 
             final Iterator<List<Cell>> allCellLists = Iterators.concat(cellListsForeachRawScan.iterator());
 
-            if (behavior.ordinal() < CoprocessorBehavior.SCAN.ordinal()) {
+            if (behavior.ordinal() < StorageSideBehavior.SCAN.ordinal()) {
                 //this is only for CoprocessorBehavior.RAW_SCAN case to profile hbase scan speed
                 List<Cell> temp = Lists.newArrayList();
                 int counter = 0;
@@ -240,12 +240,12 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
                 appendProfileInfo(sb, "scanned " + counter);
             }
 
-            if (behavior.ordinal() < CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
+            if (behavior.ordinal() < StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM.ordinal()) {
                 scanReq.disableAggCacheMemCheck(); // disable mem check if so told
             }
 
             final MutableBoolean scanNormalComplete = new MutableBoolean(true);
-            final long deadline = request.getTimeout() + this.serviceStartTime;
+            final long deadline = scanReq.getTimeout() + this.serviceStartTime;
             final long storagePushDownLimit = scanReq.getStoragePushDownLimit();
 
             final CellListIterator cellListIterator = new CellListIterator() {
@@ -285,12 +285,12 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             };
 
             IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, //
-                    request.getRowkeyPreambleSize(), CoprocessorBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString().equals(request.getBehavior()));
+                    request.getRowkeyPreambleSize(), StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString().equals(scanReq.getStorageBehavior()));
 
             IGTScanner rawScanner = store.scan(scanReq);
             IGTScanner finalScanner = scanReq.decorateScanner(rawScanner, //
-                    behavior.ordinal() >= CoprocessorBehavior.SCAN_FILTER.ordinal(), //
-                    behavior.ordinal() >= CoprocessorBehavior.SCAN_FILTER_AGGR.ordinal(), deadline);
+                    behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER.ordinal(), //
+                    behavior.ordinal() >= StorageSideBehavior.SCAN_FILTER_AGGR.ordinal(), deadline);
 
             ByteBuffer buffer = ByteBuffer.allocate(BufferedMeasureEncoder.DEFAULT_BUFFER_SIZE);
 


[13/50] [abbrv] kylin git commit: KYLIN-1922 improve CI

Posted by sh...@apache.org.
KYLIN-1922 improve CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/942406bd
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/942406bd
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/942406bd

Branch: refs/heads/KYLIN-1726
Commit: 942406bda3ec7405a6d2be27ba11bb38b5f88298
Parents: 466cf1a
Author: Hongbin Ma <ma...@apache.org>
Authored: Fri Sep 9 23:01:00 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Fri Sep 9 23:01:00 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITKylinQueryTest.java    | 22 ++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/942406bd/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 5f6af7a..3411c91 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -44,6 +44,8 @@ import org.apache.kylin.storage.hbase.HBaseStorage;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -119,6 +121,22 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testTimeoutQuery() throws Exception {
+        thrown.expect(SQLException.class);
+
+        //the timeout query should fail with a SQLException whose cause is a GTScanSelfTerminatedException
+        thrown.expectCause(new BaseMatcher<Throwable>() {
+            @Override
+            public boolean matches(Object item) {
+                if (item instanceof GTScanSelfTerminatedException) {
+                    return true;
+                }
+                return false;
+            }
+
+            @Override
+            public void describeTo(Description description) {
+            }
+        });
 
         try {
 
@@ -133,10 +151,6 @@ public class ITKylinQueryTest extends KylinTestBase {
             RemoveBlackoutRealizationsRule.blackouts.add("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
             execAndCompQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_timeout", null, true);
-        } catch (SQLException e) {
-            if (!(e.getCause() instanceof GTScanSelfTerminatedException)) {
-                throw new RuntimeException();
-            }
         } finally {
 
             //these two cubes have RAW measures, which disturb limit push down