You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by sh...@apache.org on 2016/03/04 03:02:42 UTC

[01/43] kylin git commit: fix hardcoded path in FactDistinctColumnsReducerTest.java [Forced Update!]

Repository: kylin
Updated Branches:
  refs/heads/helix-rebase 74eadc458 -> 66b84a2c7 (forced update)


fix hardcoded path in FactDistinctColumnsReducerTest.java


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/66294d3e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/66294d3e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/66294d3e

Branch: refs/heads/helix-rebase
Commit: 66294d3e73cc618bdb9e19f7f1848f85e263d680
Parents: 294fc70
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 27 22:49:41 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Sat Feb 27 23:18:28 2016 +0800

----------------------------------------------------------------------
 .../kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java    | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/66294d3e/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
index f46683e..928db74 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducerTest.java
@@ -1,5 +1,6 @@
 package org.apache.kylin.engine.mr.steps;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.UUID;
@@ -22,7 +23,8 @@ public class FactDistinctColumnsReducerTest {
     public void testWriteCuboidStatistics() throws IOException {
 
         final Configuration conf = HadoopUtil.getCurrentConfiguration();
-        final Path outputPath = new Path("file:///tmp/kylin/cuboidstatistics/" + UUID.randomUUID().toString());
+        File tmp = File.createTempFile("cuboidstatistics", "");
+        final Path outputPath = new Path(tmp.getParent().toString() + File.separator + UUID.randomUUID().toString());
         if (!FileSystem.getLocal(conf).exists(outputPath)) {
             //            FileSystem.getLocal(conf).create(outputPath);
         }


[12/43] kylin git commit: change ‘float’ to ‘double’ when sync a hive table into kylin

Posted by sh...@apache.org.
change ‘float’ to ‘double’ when sync a hive table into kylin


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3fb67ca7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3fb67ca7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3fb67ca7

Branch: refs/heads/helix-rebase
Commit: 3fb67ca78a6059aa0f350607299cc3551042b1b5
Parents: bc7d4f5
Author: shaofengshi <sh...@apache.org>
Authored: Wed Mar 2 17:12:10 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Wed Mar 2 17:12:10 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/source/hive/HiveSourceTableLoader.java   | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3fb67ca7/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index f2f2d2a..2aef4e6 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -123,7 +123,12 @@ public class HiveSourceTableLoader {
                 FieldSchema field = fields.get(i);
                 ColumnDesc cdesc = new ColumnDesc();
                 cdesc.setName(field.getName().toUpperCase());
-                cdesc.setDatatype(field.getType());
+                // use "double" in kylin for "float"
+                if ("float".equalsIgnoreCase(field.getType())) {
+                    cdesc.setDatatype("double");
+                } else {
+                    cdesc.setDatatype(field.getType());
+                }
                 cdesc.setId(String.valueOf(i + 1));
                 columns.add(cdesc);
             }


[18/43] kylin git commit: KYLIN-1466 Update kylin.sh and bring denpendencies to runnable classes

Posted by sh...@apache.org.
KYLIN-1466 Update kylin.sh and bring denpendencies to runnable classes


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/daeaf084
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/daeaf084
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/daeaf084

Branch: refs/heads/helix-rebase
Commit: daeaf08444ebd86a9de4e8addafa7541c25523be
Parents: 2d4922d
Author: lidongsjtu <li...@apache.org>
Authored: Thu Mar 3 23:26:32 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Thu Mar 3 23:26:55 2016 +0800

----------------------------------------------------------------------
 build/bin/kylin.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/daeaf084/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index a21928e..f4d9fac 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -206,7 +206,7 @@ then
 
     export HBASE_CLASSPATH=${KYLIN_HOME}/lib/*:$hive_dependency:${HBASE_CLASSPATH}
 
-    exec hbase -Dlog4j.configuration=kylin-log4j.properties "$@"
+    exec hbase ${KYLIN_EXTRA_START_OPTS} -Dkylin.hive.dependency=${hive_dependency} -Dkylin.hbase.dependency=${hbase_dependency} -Dlog4j.configuration=kylin-log4j.properties "$@"
 
 else
     echo "usage: kylin.sh start or kylin.sh stop"


[05/43] kylin git commit: KYLIN-1054 Support Hive client Beeline

Posted by sh...@apache.org.
KYLIN-1054 Support Hive client Beeline


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/9c77a5eb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/9c77a5eb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/9c77a5eb

Branch: refs/heads/helix-rebase
Commit: 9c77a5ebe955e708c51f9ea9c3cf3d747e880cd7
Parents: 7df1cc7
Author: lidongsjtu <li...@apache.org>
Authored: Mon Feb 29 19:26:56 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 29 19:26:56 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh               |  11 +-
 .../apache/kylin/common/KylinConfigBase.java    |  12 +-
 .../test_case_data/sandbox/kylin.properties     |   3 +
 .../kylin/provision/BuildIIWithStream.java      |  17 ++-
 .../kylin/source/hive/HiveCmdBuilder.java       | 106 ++++++++++++++++++
 .../apache/kylin/source/hive/HiveMRInput.java   |  25 +++--
 .../kylin/source/hive/HiveCmdBuilderTest.java   |  78 +++++++++++++
 .../storage/hbase/steps/DeprecatedGCStep.java   |   6 +-
 .../storage/hbase/util/HiveCmdBuilder.java      | 109 +++++++++++++++++++
 .../storage/hbase/util/StorageCleanupJob.java   |  24 ++--
 10 files changed, 351 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 5994dda..171c5b1 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -17,7 +17,16 @@
 # limitations under the License.
 #
 
-hive_env=`hive -e set | grep 'env:CLASSPATH'`
+client_mode=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.client`
+hive_env=
+
+if [ "${client_mode}" == "beeline" ]
+then
+    beeline_params=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.beeline.params`
+    hive_env=`beeline ${beeline_params} --outputformat=dsv -e set | grep 'env:CLASSPATH'`
+else
+    hive_env=`hive -e set | grep 'env:CLASSPATH'`
+fi
 
 hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`
 arr=(`echo $hive_classpath | cut -d ":"  --output-delimiter=" " -f 1-`)

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 0cee9f8..826a28c 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -367,7 +367,7 @@ public class KylinConfigBase implements Serializable {
     public double getCubeAlgorithmAutoThreshold() {
         return Double.parseDouble(getOptional("kylin.cube.algorithm.auto.threshold", "8"));
     }
-    
+
     public int getCubeAggrGroupMaxSize() {
         return Integer.parseInt(getOptional("kylin.cube.aggrgroup.max.size", "12"));
     }
@@ -533,7 +533,7 @@ public class KylinConfigBase implements Serializable {
     public String getMailSender() {
         return getOptional("mail.sender", "");
     }
-    
+
     public boolean isWebCrossDomainEnabled() {
         return Boolean.parseBoolean(getOptional("crossdomain.enable", "true"));
     }
@@ -542,4 +542,12 @@ public class KylinConfigBase implements Serializable {
         return getMetadataUrl();
     }
 
+    public String getHiveClientMode() {
+        return getOptional("kylin.hive.client", "cli");
+    }
+
+    public String getHiveBeelineParams() {
+        return getOptional("kylin.hive.beeline.params", "");
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index bf161fc..a304cab 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -20,6 +20,9 @@ kylin.storage.url=hbase
 # Temp folder in hdfs, make sure user has the right access to the hdfs directory
 kylin.hdfs.working.dir=/kylin
 
+# Parameters for beeline client
+kylin.hive.beeline.params=--hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
+
 kylin.job.mapreduce.default.reduce.input.mb=500
 
 # If true, job engine will not assume that hadoop CLI reside on the same server as it self

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
index 8436687..9b7cd14 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithStream.java
@@ -74,6 +74,7 @@ import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.source.hive.HiveCmdBuilder;
 import org.apache.kylin.source.hive.HiveTableReader;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.ii.IICreateHTableJob;
@@ -147,15 +148,13 @@ public class BuildIIWithStream {
         }
 
         ShellExecutable step = new ShellExecutable();
-        StringBuffer buf = new StringBuffer();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql + "\n");
-        buf.append(dropTableHql + "\n");
-        buf.append(createTableHql + "\n");
-        buf.append(insertDataHqls + "\n");
-        buf.append("\"");
-
-        step.setCmd(buf.toString());
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement(dropTableHql);
+        hiveCmdBuilder.addStatement(createTableHql);
+        hiveCmdBuilder.addStatement(insertDataHqls);
+
+        step.setCmd(hiveCmdBuilder.build());
         logger.info(step.getCmd());
         step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
         kylinConfig.getCliCommandExecutor().execute(step.getCmd(), null);

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
new file mode 100644
index 0000000..f510780
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+public class HiveCmdBuilder {
+    private static final Logger logger = LoggerFactory.getLogger(HiveCmdBuilder.class);
+
+    public enum HiveClientMode {
+        CLI, BEELINE
+    }
+
+    private HiveClientMode clientMode;
+    private KylinConfig kylinConfig;
+    final private ArrayList<String> statements = Lists.newArrayList();
+
+    public HiveCmdBuilder() {
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+    }
+
+    public String build() {
+        StringBuffer buf = new StringBuffer();
+
+        switch (clientMode) {
+        case CLI:
+            buf.append("hive -e \"");
+            for (String statement : statements) {
+                buf.append(statement).append("\n");
+            }
+            buf.append("\"");
+            break;
+        case BEELINE:
+            BufferedWriter bw = null;
+            try {
+                File tmpHql = File.createTempFile("beeline_", ".hql");
+                StringBuffer hqlBuf = new StringBuffer();
+                bw = new BufferedWriter(new FileWriter(tmpHql));
+                for (String statement : statements) {
+                    bw.write(statement);
+                    bw.newLine();
+
+                    hqlBuf.append(statement).append("\n");
+                }
+                buf.append("beeline ");
+                buf.append(kylinConfig.getHiveBeelineParams());
+                buf.append(" -f ");
+                buf.append(tmpHql.getAbsolutePath());
+                buf.append(";rm -f ");
+                buf.append(tmpHql.getAbsolutePath());
+
+                logger.info("The statements to execute in beeline: \n" + hqlBuf);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            } finally {
+                IOUtils.closeQuietly(bw);
+            }
+            break;
+        default:
+            throw new RuntimeException("Hive client cannot be recognized: " + clientMode);
+        }
+
+        return buf.toString();
+    }
+
+    public void reset() {
+        statements.clear();
+    }
+
+    public void addStatement(String statement) {
+        statements.add(statement);
+    }
+
+    @Override
+    public String toString() {
+        return build();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index b8d1333..873641d 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hive.hcatalog.data.HCatRecord;
@@ -123,15 +124,14 @@ public class HiveMRInput implements IMRInput {
             }
 
             ShellExecutable step = new ShellExecutable();
-            StringBuilder buf = new StringBuilder();
-            buf.append("hive -e \"");
-            buf.append(useDatabaseHql + "\n");
-            buf.append(dropTableHql + "\n");
-            buf.append(createTableHql + "\n");
-            buf.append(insertDataHqls + "\n");
-            buf.append("\"");
-
-            step.setCmd(buf.toString());
+
+            HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(useDatabaseHql);
+            hiveCmdBuilder.addStatement(dropTableHql);
+            hiveCmdBuilder.addStatement(createTableHql);
+            hiveCmdBuilder.addStatement(insertDataHqls);
+
+            step.setCmd(hiveCmdBuilder.build());
             step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
 
             return step;
@@ -164,10 +164,11 @@ public class HiveMRInput implements IMRInput {
 
             final String hiveTable = this.getIntermediateTableIdentity();
             if (config.isHiveKeepFlatTable() == false && StringUtils.isNotEmpty(hiveTable)) {
-                final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS  " + hiveTable + ";";
-                final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
+                final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+                hiveCmdBuilder.addStatement("USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";");
+                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS  " + hiveTable + ";");
                 try {
-                    config.getCliCommandExecutor().execute(dropHiveCMD);
+                    config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
                     output.append("Hive table " + hiveTable + " is dropped. \n");
 
                     Path externalDataPath = new Path(getExternalDataPath());

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
----------------------------------------------------------------------
diff --git a/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java b/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
new file mode 100644
index 0000000..70c11b3
--- /dev/null
+++ b/source-hive/src/test/java/org/apache/kylin/source/hive/HiveCmdBuilderTest.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Created by dongli on 2/22/16.
+ */
+public class HiveCmdBuilderTest {
+
+    @Before
+    public void setup() {
+        System.setProperty("KYLIN_CONF", "../examples/test_case_data/localmeta");
+    }
+
+    @After
+    public void after() throws Exception {
+        System.clearProperty("kylin.hive.client");
+        System.clearProperty("kylin.hive.beeline.params");
+    }
+
+    @Test
+    public void testHiveCLI() {
+        System.setProperty("kylin.hive.client", "cli");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        assertEquals("hive -e \"USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n\"", hiveCmdBuilder.build());
+    }
+
+    @Test
+    public void testBeeline() throws IOException {
+        System.setProperty("kylin.hive.client", "beeline");
+        System.setProperty("kylin.hive.beeline.params", "-u jdbc_url");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        String cmd = hiveCmdBuilder.build();
+        assertTrue(cmd.startsWith("beeline -u jdbc_url -f") && cmd.contains(";rm -f"));
+
+        String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
+        String hqlStatement = FileUtils.readFileToString(new File(hqlFile));
+        assertEquals("USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n", hqlStatement);
+
+        FileUtils.forceDelete(new File(hqlFile));
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index bba16c3..735f967 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -38,6 +38,7 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.util.HiveCmdBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -86,8 +87,9 @@ public class DeprecatedGCStep extends AbstractExecutable {
         final String hiveTable = this.getOldHiveTable();
         if (StringUtils.isNotEmpty(hiveTable)) {
             final String dropSQL = "USE " + context.getConfig().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS  " + hiveTable + ";";
-            final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
-            context.getConfig().getCliCommandExecutor().execute(dropHiveCMD);
+            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(dropSQL);
+            context.getConfig().getCliCommandExecutor().execute(hiveCmdBuilder.build());
             output.append("Dropped Hive table " + hiveTable + " \n");
         }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
new file mode 100644
index 0000000..f1ca4de
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.storage.hbase.util;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Created by dongli on 2/29/16.
+ */
+public class HiveCmdBuilder {
+    private static final Logger logger = LoggerFactory.getLogger(HiveCmdBuilder.class);
+
+    public enum HiveClientMode {
+        CLI, BEELINE
+    }
+
+    private HiveClientMode clientMode;
+    private KylinConfig kylinConfig;
+    final private ArrayList<String> statements = Lists.newArrayList();
+
+    public HiveCmdBuilder() {
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+    }
+
+    public String build() {
+        StringBuffer buf = new StringBuffer();
+
+        switch (clientMode) {
+            case CLI:
+                buf.append("hive -e \"");
+                for (String statement : statements) {
+                    buf.append(statement).append("\n");
+                }
+                buf.append("\"");
+                break;
+            case BEELINE:
+                BufferedWriter bw = null;
+                try {
+                    File tmpHql = File.createTempFile("beeline_", ".hql");
+                    StringBuffer hqlBuf = new StringBuffer();
+                    bw = new BufferedWriter(new FileWriter(tmpHql));
+                    for (String statement : statements) {
+                        bw.write(statement);
+                        bw.newLine();
+
+                        hqlBuf.append(statement).append("\n");
+                    }
+                    buf.append("beeline ");
+                    buf.append(kylinConfig.getHiveBeelineParams());
+                    buf.append(" -f ");
+                    buf.append(tmpHql.getAbsolutePath());
+                    buf.append(";rm -f ");
+                    buf.append(tmpHql.getAbsolutePath());
+
+                    logger.info("The statements to execute in beeline: \n" + hqlBuf);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                } finally {
+                    IOUtils.closeQuietly(bw);
+                }
+                break;
+            default:
+                throw new RuntimeException("Hive client cannot be recognized: " + clientMode);
+        }
+
+        return buf.toString();
+    }
+
+    public void reset() {
+        statements.clear();
+    }
+
+    public void addStatement(String statement) {
+        statements.add(statement);
+    }
+
+    @Override
+    public String toString() {
+        return build();
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/9c77a5eb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 2137f57..c010d51 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -240,13 +240,11 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         final int uuidLength = 36;
         
         final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
-        StringBuilder buf = new StringBuilder();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql);
-        buf.append("show tables " + "\'kylin_intermediate_*\'" + "; ");
-        buf.append("\"");
-        
-        Pair<Integer, String> result = cmdExec.execute(buf.toString());
+        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement("show tables " + "\'kylin_intermediate_*\'" + "; ");
+
+        Pair<Integer, String> result = cmdExec.execute(hiveCmdBuilder.build());
 
         String outputStr = result.getSecond();
         BufferedReader reader = new BufferedReader(new StringReader(outputStr));
@@ -282,17 +280,15 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         }
 
         if (delete == true) {
-            buf.delete(0, buf.length());
-            buf.append("hive -e \"");
-            buf.append(useDatabaseHql);
+            hiveCmdBuilder.reset();
+            hiveCmdBuilder.addStatement(useDatabaseHql);
             for (String delHive : allHiveTablesNeedToBeDeleted) {
-                buf.append("drop table if exists " + delHive + "; ");
+                hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
                 logger.info("Remove " + delHive + " from hive tables.");
             }
-            buf.append("\"");
-            
+
             try {
-                cmdExec.execute(buf.toString());
+                cmdExec.execute(hiveCmdBuilder.build());
             } catch (IOException e) {
                 e.printStackTrace();
             }


[21/43] kylin git commit: KYLIN-1311 on the way

Posted by sh...@apache.org.
KYLIN-1311 on the way


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1c4deab9
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1c4deab9
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1c4deab9

Branch: refs/heads/helix-rebase
Commit: 1c4deab9cbeef77c04764b5138ec730e33fbfdb7
Parents: b5ee2df
Author: shaofengshi <sh...@apache.org>
Authored: Fri Jan 22 11:01:48 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 build/bin/kylin.sh                              |   8 +-
 .../test_case_data/localmeta/kylin.properties   |   2 +-
 server/pom.xml                                  |   1 +
 .../rest/controller/ClusterController.java      |  71 ++
 .../kylin/rest/controller/JobController.java    |  33 -
 .../rest/controller/StreamingController.java    |  68 +-
 .../kylin/rest/helix/HelixClusterAdmin.java     |  31 +-
 .../rest/helix/JobEngineTransitionHandler.java  |  70 ++
 .../helix/LeaderStandbyStateModelFactory.java   | 125 +---
 .../helix/StreamCubeBuildTransitionHandler.java | 107 +++
 .../rest/request/StreamingBuildRequest.java     |  29 +-
 .../security/KylinAuthenticationProvider.java   |   3 +-
 .../kylin/rest/service/StreamingService.java    |  34 +-
 .../rest/controller/UserControllerTest.java     |   9 -
 .../kylin/rest/helix/HelixClusterAdminTest.java |  22 +-
 .../kylin/rest/service/CacheServiceTest.java    | 720 +++++++++----------
 .../kylin/rest/service/ServiceTestBase.java     |  40 +-
 .../rest/service/TestBaseWithZookeeper.java     |  74 ++
 .../source/kafka/TimedJsonStreamParser.java     |   7 +-
 19 files changed, 825 insertions(+), 629 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index 074acce..43ffc11 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -75,7 +75,7 @@ then
 
     if [ -z "$KYLIN_REST_ADDRESS" ]
     then
-        kylin_rest_address=`hostname`":"`grep "<Connector port=" ${tomcat_root}/conf/server.xml |grep protocol=\"HTTP/1.1\" | cut -d '=' -f 2 | cut -d \" -f 2`
+        kylin_rest_address=`hostname -f`":"`grep "<Connector port=" ${tomcat_root}/conf/server.xml |grep protocol=\"HTTP/1.1\" | cut -d '=' -f 2 | cut -d \" -f 2`
         echo "KYLIN_REST_ADDRESS not found, will use ${kylin_rest_address}"
     else
         echo "KYLIN_REST_ADDRESS is set to: $KYLIN_REST_ADDRESS"
@@ -159,12 +159,12 @@ then
         exit 0
     elif [ "$2" == "stop" ]
     then
-        if [ ! -f "${KYLIN_HOME}/$3_$4" ]
+        if [ ! -f "${KYLIN_HOME}/logs/$3_$4" ]
             then
                 echo "streaming is not running, please check"
                 exit 1
             fi
-            pid=`cat ${KYLIN_HOME}/$3_$4`
+            pid=`cat ${KYLIN_HOME}/logs/$3_$4`
             if [ "$pid" = "" ]
             then
                 echo "streaming is not running, please check"
@@ -173,7 +173,7 @@ then
                 echo "stopping streaming:$pid"
                 kill $pid
             fi
-            rm ${KYLIN_HOME}/$3_$4
+            rm ${KYLIN_HOME}/logs/$3_$4
             exit 0
         else
             echo

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/examples/test_case_data/localmeta/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kylin.properties b/examples/test_case_data/localmeta/kylin.properties
index 978102f..41a9895 100644
--- a/examples/test_case_data/localmeta/kylin.properties
+++ b/examples/test_case_data/localmeta/kylin.properties
@@ -6,7 +6,7 @@
 kylin.owner=whoami@kylin.apache.org
 
 # List of web servers in use, this enables one web server instance to sync up with other servers.
-#kylin.rest.servers=localhost:7070
+kylin.rest.servers=localhost:7070
 
 # The metadata store in hbase
 kylin.metadata.url=

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
index 86ec5a5..2359855 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -466,6 +466,7 @@
             <groupId>org.apache.zookeeper</groupId>
             <artifactId>zookeeper</artifactId>
             <version>${zookeeper.version}</version>
+            <scope>provided</scope>
             <exclusions>
                 <exclusion>
                     <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
new file mode 100644
index 0000000..97fff36
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.controller;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.job.JobInstance;
+import org.apache.kylin.job.constant.JobStatusEnum;
+import org.apache.kylin.job.constant.JobTimeFilterEnum;
+import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
+import org.apache.kylin.rest.request.JobListRequest;
+import org.apache.kylin.rest.service.JobService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.ResponseBody;
+
+import java.util.*;
+
+/**
+ * 
+ */
+@Controller
+@RequestMapping(value = "cluster")
+public class ClusterController extends BasicController implements InitializingBean {
+    private static final Logger logger = LoggerFactory.getLogger(ClusterController.class);
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see
+     * org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
+     */
+    @Override
+    public void afterPropertiesSet() throws Exception {
+
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+
+        final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+        clusterAdmin.start();
+
+        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+            @Override
+            public void run() {
+                clusterAdmin.stop();
+            }
+        }));
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 77d987f..a61635d 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -61,42 +61,9 @@ public class JobController extends BasicController implements InitializingBean {
      */
     @Override
     public void afterPropertiesSet() throws Exception {
-
         String timeZone = jobService.getConfig().getTimeZone();
         TimeZone tzone = TimeZone.getTimeZone(timeZone);
         TimeZone.setDefault(tzone);
-
-        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-
-        if (kylinConfig.isClusterEnabled() == true) {
-            logger.info("Kylin cluster enabled, will use Helix/zookeeper to coordinate.");
-            final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
-            clusterAdmin.start();
-
-            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    clusterAdmin.stop();
-                }
-            }));
-        } else {
-            new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    try {
-                        DefaultScheduler scheduler = DefaultScheduler.createInstance();
-                        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
-                        if (!scheduler.hasStarted()) {
-                            logger.error("scheduler has not been started");
-                            System.exit(1);
-                        }
-                    } catch (Exception e) {
-                        throw new RuntimeException(e);
-                    }
-                }
-            }).start();
-        }
-
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index fb806d1..209c552 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -26,11 +26,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.model.CubeBuildTypeEnum;
-import org.apache.kylin.engine.streaming.BootstrapConfig;
 import org.apache.kylin.engine.streaming.StreamingConfig;
-import org.apache.kylin.job.JobInstance;
-import org.apache.kylin.job.exception.JobException;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -45,7 +41,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.access.AccessDeniedException;
-import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.*;
 
@@ -93,7 +88,6 @@ public class StreamingController extends BasicController {
         }
     }
 
-
     /**
      *
      * create Streaming Schema
@@ -105,7 +99,7 @@ public class StreamingController extends BasicController {
         //Update Model
         StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
         KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);
-        if (streamingConfig == null ||kafkaConfig == null) {
+        if (streamingConfig == null || kafkaConfig == null) {
             return streamingRequest;
         }
         if (StringUtils.isEmpty(streamingConfig.getName())) {
@@ -124,7 +118,7 @@ public class StreamingController extends BasicController {
         try {
             kafkaConfig.setUuid(UUID.randomUUID().toString());
             kafkaConfigService.createKafkaConfig(kafkaConfig);
-        }catch (IOException e){
+        } catch (IOException e) {
             try {
                 streamingService.dropStreamingConfig(streamingConfig);
             } catch (IOException e1) {
@@ -139,7 +133,7 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "", method = { RequestMethod.PUT })
     @ResponseBody
-        public StreamingRequest updateModelDesc(@RequestBody StreamingRequest streamingRequest) throws JsonProcessingException {
+    public StreamingRequest updateModelDesc(@RequestBody StreamingRequest streamingRequest) throws JsonProcessingException {
         StreamingConfig streamingConfig = deserializeSchemalDesc(streamingRequest);
         KafkaConfig kafkaConfig = deserializeKafkaSchemalDesc(streamingRequest);
 
@@ -156,7 +150,7 @@ public class StreamingController extends BasicController {
         }
         try {
             kafkaConfig = kafkaConfigService.updateKafkaConfig(kafkaConfig);
-        }catch (AccessDeniedException accessDeniedException) {
+        } catch (AccessDeniedException accessDeniedException) {
             throw new ForbiddenException("You don't have right to update this KafkaConfig.");
         } catch (Exception e) {
             logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
@@ -203,7 +197,6 @@ public class StreamingController extends BasicController {
         return desc;
     }
 
-
     private KafkaConfig deserializeKafkaSchemalDesc(StreamingRequest streamingRequest) {
         KafkaConfig desc = null;
         try {
@@ -227,16 +220,14 @@ public class StreamingController extends BasicController {
         request.setMessage(message);
     }
 
-
-
     /**
      * Send a stream build request
      *
-     * @param cubeName Cube ID
+     * @param cubeName Cube Name
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{cubeName}/build", method = {RequestMethod.PUT})
+    @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT })
     @ResponseBody
     public StreamingBuildRequest buildStream(@PathVariable String cubeName, @RequestBody StreamingBuildRequest streamingBuildRequest) {
         StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
@@ -244,27 +235,54 @@ public class StreamingController extends BasicController {
         List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
         Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
         CubeInstance cube = cubes.get(0);
-        if (streamingBuildRequest.isFillGap() == false) {
-            Preconditions.checkArgument(streamingBuildRequest.getEnd() > streamingBuildRequest.getStart(), "End time should be greater than start time.");
-            for (CubeSegment segment : cube.getSegments()) {
-                if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
-                    streamingBuildRequest.setMessage("The segment already exists: " + segment.toString());
-                    streamingBuildRequest.setSuccessful(false);
-                    return streamingBuildRequest;
-                }
+        if (streamingBuildRequest.getEnd() <= streamingBuildRequest.getStart()) {
+            streamingBuildRequest.setMessage("End time should be greater than start time.");streamingBuildRequest.setSuccessful(false);
+            return streamingBuildRequest;
+        }
+
+        for (CubeSegment segment : cube.getSegments()) {
+            if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
+                streamingBuildRequest.setMessage("The segment already exists: " + segment.toString());
+                streamingBuildRequest.setSuccessful(false);
+                return streamingBuildRequest;
             }
         }
 
         streamingBuildRequest.setStreaming(streamingConfig.getName());
-        streamingService.buildStream(cubeName, streamingBuildRequest);
+        streamingService.buildStream(cube, streamingBuildRequest);
         streamingBuildRequest.setMessage("Build request is submitted successfully.");
         streamingBuildRequest.setSuccessful(true);
         return streamingBuildRequest;
 
     }
 
+    /**
+     * Send a stream fillGap request
+     *
+     * @param cubeName Cube Name
+     * @return
+     * @throws IOException
+     */
+    @RequestMapping(value = "/{cubeName}/fillgap", method = { RequestMethod.PUT })
+    @ResponseBody
+    public StreamingBuildRequest fillGap(@PathVariable String cubeName) {
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
+        List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
+        Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
+        CubeInstance cube = cubes.get(0);
+
+        StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest();
+        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        streamingService.fillGap(cube);
+        streamingBuildRequest.setMessage("FillGap request is submitted successfully.");
+        streamingBuildRequest.setSuccessful(true);
+        return streamingBuildRequest;
+
+    }
+
     public void setStreamingService(StreamingService streamingService) {
-        this.streamingService= streamingService;
+        this.streamingService = streamingService;
     }
 
     public void setKafkaConfigService(KafkaConfigService kafkaConfigService) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 9850e24..0758ef1 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -33,8 +33,10 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.StreamingBuildRequest;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -126,14 +128,13 @@ public class HelixClusterAdmin {
 
     }
 
-    public void addStreamingJob(String streamingName, long start, long end) {
-        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
-        if (!admin.getResourcesInCluster(clusterName).contains(resourceName)) {
-            admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
-        } else {
-            logger.warn("Resource '" + resourceName + "' already exists in cluster, skip adding.");
+    public void addStreamingJob(StreamingBuildRequest streamingBuildRequest) {
+        String resourceName = streamingBuildRequest.toResourceName();
+        if (admin.getResourcesInCluster(clusterName).contains(resourceName)) {
+            logger.warn("Resource '" + resourceName + "' already exists in cluster, remove and re-add.");
+            admin.dropResource(clusterName, resourceName);
         }
-
+        admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
         admin.rebalance(clusterName, resourceName, 2, "", TAG_STREAM_BUILDER);
 
     }
@@ -150,7 +151,7 @@ public class HelixClusterAdmin {
      */
     protected void startInstance(String instanceName) throws Exception {
         participantManager = HelixManagerFactory.getZKHelixManager(clusterName, instanceName, InstanceType.PARTICIPANT, zkAddress);
-        participantManager.getStateMachineEngine().registerStateModelFactory(StateModelDefId.from(MODEL_LEADER_STANDBY), new LeaderStandbyStateModelFactory());
+        participantManager.getStateMachineEngine().registerStateModelFactory(StateModelDefId.from(MODEL_LEADER_STANDBY), new LeaderStandbyStateModelFactory(this.kylinConfig));
         participantManager.connect();
         participantManager.addLiveInstanceChangeListener(new KylinClusterLiveInstanceChangeListener());
 
@@ -179,10 +180,12 @@ public class HelixClusterAdmin {
     public void stop() {
         if (participantManager != null) {
             participantManager.disconnect();
+            participantManager = null;
         }
 
         if (controllerManager != null) {
             controllerManager.disconnect();
+            controllerManager = null;
         }
     }
 
@@ -269,11 +272,13 @@ public class HelixClusterAdmin {
                 int indexOfUnderscore = instanceName.lastIndexOf("_");
                 instanceRestAddresses.add(instanceName.substring(0, indexOfUnderscore) + ":" + instanceName.substring(indexOfUnderscore + 1));
             }
-            String restServersInCluster = StringUtil.join(instanceRestAddresses, ",");
-            kylinConfig.setProperty("kylin.rest.servers", restServersInCluster);
-            System.setProperty("kylin.rest.servers", restServersInCluster);
-            logger.info("kylin.rest.servers update to " + restServersInCluster);
-            Broadcaster.clearCache();
+            if (instanceRestAddresses.size() > 0) {
+                String restServersInCluster = StringUtil.join(instanceRestAddresses, ",");
+                kylinConfig.setProperty("kylin.rest.servers", restServersInCluster);
+                System.setProperty("kylin.rest.servers", restServersInCluster);
+                logger.info("kylin.rest.servers update to " + restServersInCluster);
+                Broadcaster.clearCache();
+            }
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/helix/JobEngineTransitionHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/JobEngineTransitionHandler.java b/server/src/main/java/org/apache/kylin/rest/helix/JobEngineTransitionHandler.java
new file mode 100644
index 0000000..3ef04ee
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/helix/JobEngineTransitionHandler.java
@@ -0,0 +1,70 @@
+package org.apache.kylin.rest.helix;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import org.apache.helix.NotificationContext;
+import org.apache.helix.api.TransitionHandler;
+import org.apache.helix.model.Message;
+import org.apache.helix.participant.statemachine.Transition;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.lock.MockJobLock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ */
+public class JobEngineTransitionHandler extends TransitionHandler {
+    private static final Logger logger = LoggerFactory.getLogger(JobEngineTransitionHandler.class);
+    private final KylinConfig kylinConfig;
+
+    private static ConcurrentMap<KylinConfig, JobEngineTransitionHandler> instanceMaps = Maps.newConcurrentMap();
+
+    private JobEngineTransitionHandler(KylinConfig kylinConfig) {
+        this.kylinConfig = kylinConfig;
+    }
+
+    public static JobEngineTransitionHandler getInstance(KylinConfig kylinConfig) {
+        Preconditions.checkNotNull(kylinConfig);
+        instanceMaps.putIfAbsent(kylinConfig, new JobEngineTransitionHandler(kylinConfig));
+        return instanceMaps.get(kylinConfig);
+    }
+
+    @Transition(to = "LEADER", from = "STANDBY")
+    public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
+        logger.info("JobEngineStateModel.onBecomeLeaderFromStandby()");
+        try {
+            DefaultScheduler scheduler = DefaultScheduler.createInstance();
+            scheduler.init(new JobEngineConfig(this.kylinConfig), new MockJobLock());
+            while (!scheduler.hasStarted()) {
+                logger.error("scheduler has not been started");
+                Thread.sleep(1000);
+            }
+        } catch (Exception e) {
+            logger.error("error start DefaultScheduler", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Transition(to = "STANDBY", from = "LEADER")
+    public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
+        logger.info("JobEngineStateModel.onBecomeStandbyFromLeader()");
+        DefaultScheduler.destroyInstance();
+
+    }
+
+    @Transition(to = "STANDBY", from = "OFFLINE")
+    public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
+        logger.info("JobEngineStateModel.onBecomeStandbyFromOffline()");
+
+    }
+
+    @Transition(to = "OFFLINE", from = "STANDBY")
+    public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
+        logger.info("JobEngineStateModel.onBecomeOfflineFromStandby()");
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
index 8614e8c..940c9c2 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
@@ -1,146 +1,35 @@
 package org.apache.kylin.rest.helix;
 
-import com.google.common.base.Preconditions;
-import org.apache.helix.NotificationContext;
 import org.apache.helix.api.StateTransitionHandlerFactory;
 import org.apache.helix.api.TransitionHandler;
 import org.apache.helix.api.id.PartitionId;
 import org.apache.helix.api.id.ResourceId;
-import org.apache.helix.model.Message;
-import org.apache.helix.participant.statemachine.Transition;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.KylinConfigBase;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.engine.streaming.StreamingManager;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.job.lock.MockJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-
 import static org.apache.kylin.rest.helix.HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX;
 
 /**
  */
 public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactory<TransitionHandler> {
-    private static final Logger logger = LoggerFactory.getLogger(LeaderStandbyStateModelFactory.class);
+    private final KylinConfig kylinConfig;
+
+    public LeaderStandbyStateModelFactory(KylinConfig kylinConfig) {
+        this.kylinConfig = kylinConfig;
+    }
 
     @Override
     public TransitionHandler createStateTransitionHandler(PartitionId partitionId) {
         if (partitionId.getResourceId().equals(ResourceId.from(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE))) {
-            return JobEngineStateModel.INSTANCE;
+            return JobEngineTransitionHandler.getInstance(kylinConfig);
         }
 
         if (partitionId.getResourceId().stringify().startsWith(RESOURCE_STREAME_CUBE_PREFIX)) {
-            return StreamCubeStateModel.INSTANCE;
+            return StreamCubeBuildTransitionHandler.getInstance(kylinConfig);
         }
 
         return null;
     }
 
-    public static class JobEngineStateModel extends TransitionHandler {
-
-        public static JobEngineStateModel INSTANCE = new JobEngineStateModel();
-
-        @Transition(to = "LEADER", from = "STANDBY")
-        public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
-            logger.info("JobEngineStateModel.onBecomeLeaderFromStandby()");
-            try {
-                final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-                DefaultScheduler scheduler = DefaultScheduler.createInstance();
-                scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
-                while (!scheduler.hasStarted()) {
-                    logger.error("scheduler has not been started");
-                    Thread.sleep(1000);
-                }
-            } catch (Exception e) {
-                logger.error("error start DefaultScheduler", e);
-                throw new RuntimeException(e);
-            }
-        }
-
-        @Transition(to = "STANDBY", from = "LEADER")
-        public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
-            logger.info("JobEngineStateModel.onBecomeStandbyFromLeader()");
-            DefaultScheduler.destroyInstance();
-
-        }
-
-        @Transition(to = "STANDBY", from = "OFFLINE")
-        public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
-            logger.info("JobEngineStateModel.onBecomeStandbyFromOffline()");
-
-        }
-
-        @Transition(to = "OFFLINE", from = "STANDBY")
-        public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
-            logger.info("JobEngineStateModel.onBecomeOfflineFromStandby()");
-
-        }
-    }
-
-    public static class StreamCubeStateModel extends TransitionHandler {
-
-        public static StreamCubeStateModel INSTANCE = new StreamCubeStateModel();
-
-        @Transition(to = "LEADER", from = "STANDBY")
-        public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
-            String resourceName = message.getResourceId().stringify();
-            Preconditions.checkArgument(resourceName.startsWith(RESOURCE_STREAME_CUBE_PREFIX));
-            long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_") + 1));
-            String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
-            long start = Long.parseLong(temp.substring(temp.lastIndexOf("_") + 1));
-            String streamingConfig = temp.substring(0, temp.lastIndexOf("_"));
-
-            final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            
-            final String cubeName = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingConfig).getCubeName();
-            final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
-            for (CubeSegment segment : cube.getSegments()) {
-                if (segment.getDateRangeStart() <= start && segment.getDateRangeEnd() >= end) {
-                    logger.info("Segment " + segment.getName() + " already exist, no need rebuild.");
-                    return;
-                }
-            }
-            
-            KylinConfigBase.getKylinHome();
-            String segmentId = start + "_" + end;
-            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingConfig + " " + segmentId + " -oneoff true -start " + start + " -end " + end + " -streaming " + streamingConfig;
-            logger.info("Executing: " + cmd);
-            try {
-                String line;
-                Process p = Runtime.getRuntime().exec(cmd);
-                BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
-                while ((line = input.readLine()) != null) {
-                    logger.info(line);
-                }
-                input.close();
-            } catch (IOException err) {
-                logger.error("Error happens during build streaming  '" + resourceName + "'", err);
-                throw new RuntimeException(err);
-            }
-
-        }
-
-        @Transition(to = "STANDBY", from = "LEADER")
-        public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
-
-        }
-
-        @Transition(to = "STANDBY", from = "OFFLINE")
-        public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
-
-        }
-
-        @Transition(to = "OFFLINE", from = "STANDBY")
-        public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
-
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
new file mode 100644
index 0000000..44d8302
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
@@ -0,0 +1,107 @@
+package org.apache.kylin.rest.helix;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import org.apache.helix.NotificationContext;
+import org.apache.helix.api.TransitionHandler;
+import org.apache.helix.model.Message;
+import org.apache.helix.participant.statemachine.Transition;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.KylinConfigBase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.rest.request.StreamingBuildRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ */
+public class StreamCubeBuildTransitionHandler extends TransitionHandler {
+
+    private static final Logger logger = LoggerFactory.getLogger(StreamCubeBuildTransitionHandler.class);
+
+    private static ConcurrentMap<KylinConfig, StreamCubeBuildTransitionHandler> instanceMaps = Maps.newConcurrentMap();
+    private final KylinConfig kylinConfig;
+
+    private StreamCubeBuildTransitionHandler(KylinConfig kylinConfig) {
+        this.kylinConfig = kylinConfig;
+    }
+
+    public static StreamCubeBuildTransitionHandler getInstance(KylinConfig kylinConfig) {
+        Preconditions.checkNotNull(kylinConfig);
+        instanceMaps.putIfAbsent(kylinConfig, new StreamCubeBuildTransitionHandler(kylinConfig));
+        return instanceMaps.get(kylinConfig);
+    }
+
+    @Transition(to = "LEADER", from = "STANDBY")
+    public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
+        String resourceName = message.getResourceId().stringify();
+        StreamingBuildRequest streamingBuildRequest = StreamingBuildRequest.fromResourceName(resourceName);
+
+        final String cubeName = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming()).getCubeName();
+        final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+        for (CubeSegment segment : cube.getSegments()) {
+            if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
+                logger.info("Segment " + segment.getName() + " already exist, no need rebuild.");
+                return;
+            }
+        }
+
+        KylinConfigBase.getKylinHome();
+        String segmentId = streamingBuildRequest.getStart() + "_" + streamingBuildRequest.getEnd();
+        String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingBuildRequest.getStreaming() + " " + segmentId + " -oneoff true -start " + streamingBuildRequest.getStart() + " -end " + streamingBuildRequest.getEnd() + " -streaming " + streamingBuildRequest.getStreaming();
+        logger.info("Executing: " + cmd);
+        try {
+            String line;
+            Process p = Runtime.getRuntime().exec(cmd);
+            BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
+            while ((line = input.readLine()) != null) {
+                logger.info(line);
+            }
+            input.close();
+        } catch (IOException err) {
+            logger.error("Error happens during build streaming  '" + resourceName + "'", err);
+            throw new RuntimeException(err);
+        }
+
+    }
+
+    @Transition(to = "STANDBY", from = "LEADER")
+    public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
+        String resourceName = message.getResourceId().stringify();
+        StreamingBuildRequest streamingBuildRequest = StreamingBuildRequest.fromResourceName(resourceName);
+        KylinConfigBase.getKylinHome();
+        String segmentId = streamingBuildRequest.getStart() + "_" + streamingBuildRequest.getEnd();
+        String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming stop " + streamingBuildRequest.getStreaming() + " " + segmentId;
+        logger.info("Executing: " + cmd);
+        try {
+            String line;
+            Process p = Runtime.getRuntime().exec(cmd);
+            BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
+            while ((line = input.readLine()) != null) {
+                logger.info(line);
+            }
+            input.close();
+        } catch (IOException err) {
+            logger.error("Error happens during build streaming  '" + resourceName + "'", err);
+            throw new RuntimeException(err);
+        }
+    }
+
+    @Transition(to = "STANDBY", from = "OFFLINE")
+    public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
+
+    }
+
+    @Transition(to = "OFFLINE", from = "STANDBY")
+    public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
+
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
index e06a06c..dcf91fd 100644
--- a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
+++ b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
@@ -18,15 +18,28 @@
 
 package org.apache.kylin.rest.request;
 
+import com.google.common.base.Preconditions;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
+
+import static org.apache.kylin.rest.helix.HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX;
+
 public class StreamingBuildRequest {
 
     private String streaming;
     private long start;
     private long end;
-    private boolean fillGap;
     private String message;
     private boolean successful;
 
+    public StreamingBuildRequest() {
+    }
+
+    public StreamingBuildRequest(String streaming, long start, long end) {
+        this.streaming = streaming;
+        this.start = start;
+        this.end = end;
+    }
+
     public String getStreaming() {
         return streaming;
     }
@@ -67,11 +80,17 @@ public class StreamingBuildRequest {
         this.end = end;
     }
 
-    public boolean isFillGap() {
-        return fillGap;
+    public String toResourceName() {
+        return HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX + streaming + "_" + start + "_" + end;
     }
 
-    public void setFillGap(boolean fillGap) {
-        this.fillGap = fillGap;
+    public static StreamingBuildRequest fromResourceName(String resourceName) {
+        Preconditions.checkArgument(resourceName.startsWith(RESOURCE_STREAME_CUBE_PREFIX));
+        long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_") + 1));
+        String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
+        long start = Long.parseLong(temp.substring(temp.lastIndexOf("_") + 1));
+        String streamingConfig = temp.substring(0, temp.lastIndexOf("_"));
+
+        return new StreamingBuildRequest(streamingConfig, start, end);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java b/server/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
index 1f147ef..b8dcd43 100644
--- a/server/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
+++ b/server/src/main/java/org/apache/kylin/rest/security/KylinAuthenticationProvider.java
@@ -72,7 +72,8 @@ public class KylinAuthenticationProvider implements AuthenticationProvider {
             }
 
             logger.debug("Authenticated user " + authed.toString());
-            
+
+            SecurityContextHolder.getContext().setAuthentication(authed);
             UserDetails user;
             
             if (authed.getDetails() == null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index da20949..7c2cc48 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -21,7 +21,6 @@ package org.apache.kylin.rest.service;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.engine.streaming.BootstrapConfig;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
@@ -54,8 +53,8 @@ public class StreamingService extends BasicService {
         if (null == cubeInstance) {
             streamingConfigs = getStreamingManager().listAllStreaming();
         } else {
-            for(StreamingConfig config : getStreamingManager().listAllStreaming()){
-                if(cubeInstance.getName().equals(config.getCubeName())){
+            for (StreamingConfig config : getStreamingManager().listAllStreaming()) {
+                if (cubeInstance.getName().equals(config.getCubeName())) {
                     streamingConfigs.add(config);
                 }
             }
@@ -84,34 +83,35 @@ public class StreamingService extends BasicService {
         if (getStreamingManager().getStreamingConfig(config.getName()) != null) {
             throw new InternalErrorException("The streamingConfig named " + config.getName() + " already exists");
         }
-        StreamingConfig streamingConfig =  getStreamingManager().saveStreamingConfig(config);
+        StreamingConfig streamingConfig = getStreamingManager().saveStreamingConfig(config);
         return streamingConfig;
     }
 
-//    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    //    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public StreamingConfig updateStreamingConfig(StreamingConfig config) throws IOException {
         return getStreamingManager().updateStreamingConfig(config);
     }
 
-//    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
+    //    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#desc, 'ADMINISTRATION') or hasPermission(#desc, 'MANAGEMENT')")
     public void dropStreamingConfig(StreamingConfig config) throws IOException {
         getStreamingManager().removeStreamingConfig(config);
     }
 
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    public void buildStream(CubeInstance cube, StreamingBuildRequest streamingBuildRequest) {
+        HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
+        clusterAdmin.addStreamingJob(streamingBuildRequest);
+    }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public void buildStream(String cube, StreamingBuildRequest streamingBuildRequest) {
+    public void fillGap(CubeInstance cube) {
         HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
-        if (streamingBuildRequest.isFillGap()) {
-            final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streamingBuildRequest.getStreaming());
-            final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
-            logger.info("all gaps:" + org.apache.commons.lang3.StringUtils.join(gaps, ","));
-            for (Pair<Long, Long> gap : gaps) {
-                clusterAdmin.addStreamingJob(streamingBuildRequest.getStreaming(), gap.getFirst(), gap.getSecond());
-            }
-        } else {
-            clusterAdmin.addStreamingJob(streamingBuildRequest.getStreaming(), streamingBuildRequest.getStart(), streamingBuildRequest.getEnd());
+        final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfigByCube(cube.getName());
+        final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
+        logger.info("all gaps:" + org.apache.commons.lang3.StringUtils.join(gaps, ","));
+        for (Pair<Long, Long> gap : gaps) {
+            StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest(streamingConfig.getName(), gap.getFirst(), gap.getSecond());
+            clusterAdmin.addStreamingJob(streamingBuildRequest);
         }
     }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
index ab77a9a..fe0e67a 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
@@ -41,15 +41,6 @@ public class UserControllerTest extends ServiceTestBase {
 
     private UserController userController;
 
-    @BeforeClass
-    public static void setupResource() {
-        staticCreateTestMetadata();
-        List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
-        User user = new User("ADMIN", "ADMIN", authorities);
-        Authentication authentication = new TestingAuthenticationToken(user, "ADMIN", "ROLE_ADMIN");
-        SecurityContextHolder.getContext().setAuthentication(authentication);
-    }
-
     @Before
     public void setup() throws Exception {
         super.setup();

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
index 594e76b5..1c8b779 100644
--- a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.helix.manager.zk.ZKHelixAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.rest.service.TestBaseWithZookeeper;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -39,10 +40,7 @@ import static org.junit.Assert.assertTrue;
 
 /**
 */
-public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
-
-    String zkAddress = "localhost:2199";
-    ZkServer server;
+public class HelixClusterAdminTest extends TestBaseWithZookeeper {
 
     HelixClusterAdmin clusterAdmin1;
     HelixClusterAdmin clusterAdmin2;
@@ -52,21 +50,8 @@ public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
 
     @Before
     public void setup() throws Exception {
-        createTestMetadata();
-        // start zookeeper on localhost
-        final File tmpDir = File.createTempFile("HelixClusterAdminTest", null); 
-        FileUtil.fullyDelete(tmpDir);
-        tmpDir.mkdirs();
-        server = new ZkServer(tmpDir.getAbsolutePath() + "/dataDir", tmpDir.getAbsolutePath() + "/logDir", new IDefaultNameSpace() {
-            @Override
-            public void createDefaultNameSpace(ZkClient zkClient) {
-            }
-        }, 2199);
-        server.start();
-
         kylinConfig = this.getTestConfig();
         kylinConfig.setRestAddress("localhost:7070");
-        kylinConfig.setZookeeperAddress(zkAddress);
         kylinConfig.setClusterName(CLUSTER_NAME);
         
         final ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkAddress);
@@ -105,7 +90,7 @@ public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
         
         // 3. shutdown the first instance
         clusterAdmin1.stop();
-        clusterAdmin1 = null;
+//        clusterAdmin1 = null;
         Thread.sleep(1000);
         assertTrue(clusterAdmin2.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
         assertEquals(1, kylinConfig.getRestServers().length);
@@ -133,7 +118,6 @@ public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
             clusterAdmin2.stop();
         }
         
-        server.shutdown();
         cleanupTestMetadata();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
index 763bebe..8193884 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
@@ -1,366 +1,354 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.rest.service;
-
-import static org.junit.Assert.*;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.I0Itec.zkclient.IDefaultNameSpace;
-import org.I0Itec.zkclient.ZkClient;
-import org.I0Itec.zkclient.ZkServer;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.restclient.Broadcaster;
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.cube.CubeDescManager;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.model.DataModelDesc;
-import org.apache.kylin.metadata.model.LookupDesc;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.metadata.project.ProjectManager;
-import org.apache.kylin.metadata.realization.IRealization;
-import org.apache.kylin.metadata.realization.RealizationType;
-import org.apache.kylin.rest.broadcaster.BroadcasterReceiveServlet;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- */
-public class CacheServiceTest extends LocalFileMetadataTestCase {
-
-    private static Server server;
-
-    private static String ZK_ADDRESS = "localhost:2199";
-    
-    private static KylinConfig configA;
-    private static KylinConfig configB;
-
-    private static final Logger logger = LoggerFactory.getLogger(CacheServiceTest.class);
-
-    private static AtomicLong counter = new AtomicLong();
-
-    @BeforeClass
-    public static void beforeClass() throws Exception {
-        staticCreateTestMetadata();
-        configA = KylinConfig.getInstanceFromEnv();
-        configA.setProperty("kylin.rest.servers", "localhost:7070");
-        configB = KylinConfig.getKylinConfigFromInputStream(KylinConfig.getKylinPropertiesAsInputSteam());
-        configB.setProperty("kylin.rest.servers", "localhost:7070");
-        configB.setMetadataUrl("../examples/test_metadata");
-
-        server = new Server(7070);
-        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-        context.setContextPath("/");
-        server.setHandler(context);
-
-        final CacheService serviceA = new CacheService() {
-            @Override
-            public KylinConfig getConfig() {
-                return configA;
-            }
-        };
-        final CacheService serviceB = new CacheService() {
-            @Override
-            public KylinConfig getConfig() {
-                return configB;
-            }
-        };
-
-        final CubeService cubeServiceA = new CubeService() {
-            @Override
-            public KylinConfig getConfig() {
-                return configA;
-            }
-        };
-        final CubeService cubeServiceB = new CubeService() {
-            @Override
-            public KylinConfig getConfig() {
-                return configB;
-            }
-        };
-
-        serviceA.setCubeService(cubeServiceA);
-        serviceA.initCubeChangeListener();
-        serviceB.setCubeService(cubeServiceB);
-        serviceB.initCubeChangeListener();
-
-        context.addServlet(new ServletHolder(new BroadcasterReceiveServlet(new BroadcasterReceiveServlet.BroadcasterHandler() {
-            @Override
-            public void handle(String type, String name, String event) {
-
-                Broadcaster.TYPE wipeType = Broadcaster.TYPE.getType(type);
-                Broadcaster.EVENT wipeEvent = Broadcaster.EVENT.getEvent(event);
-                final String log = "wipe cache type: " + wipeType + " event:" + wipeEvent + " name:" + name;
-                logger.info(log);
-                try {
-                    switch (wipeEvent) {
-                    case CREATE:
-                    case UPDATE:
-                        serviceA.rebuildCache(wipeType, name);
-                        serviceB.rebuildCache(wipeType, name);
-                        break;
-                    case DROP:
-                        serviceA.removeCache(wipeType, name);
-                        serviceB.removeCache(wipeType, name);
-                        break;
-                    default:
-                        throw new RuntimeException("invalid type:" + wipeEvent);
-                    }
-                } finally {
-                    counter.incrementAndGet();
-                }
-            }
-        })), "/");
-
-        server.start();
-    }
-
-    @AfterClass
-    public static void afterClass() throws Exception {
-        server.stop();
-        cleanAfterClass();
-    }
-
-    @Before
-    public void setUp() throws Exception {
-        counter.set(0L);
-        createTestMetadata();
-    }
-
-    @After
-    public void after() throws Exception {
-        cleanupTestMetadata();
-    }
-
-    private void waitForCounterAndClear(long count) {
-        int retryTimes = 0;
-        while ((!counter.compareAndSet(count, 0L))) {
-            if (++retryTimes > 30) {
-                throw new RuntimeException("timeout");
-            }
-            try {
-                Thread.sleep(100L);
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    private static CubeManager getCubeManager(KylinConfig config) throws Exception {
-        return CubeManager.getInstance(config);
-    }
-
-    private static ProjectManager getProjectManager(KylinConfig config) throws Exception {
-        return ProjectManager.getInstance(config);
-    }
-
-    private static CubeDescManager getCubeDescManager(KylinConfig config) throws Exception {
-        return CubeDescManager.getInstance(config);
-    }
-
-    private static MetadataManager getMetadataManager(KylinConfig config) throws Exception {
-        return MetadataManager.getInstance(config);
-    }
-
-    @Test
-    public void testBasic() throws Exception {
-        assertTrue(!configA.equals(configB));
-
-        assertNotNull(getCubeManager(configA));
-        assertNotNull(getCubeManager(configB));
-        assertNotNull(getCubeDescManager(configA));
-        assertNotNull(getCubeDescManager(configB));
-        assertNotNull(getProjectManager(configB));
-        assertNotNull(getProjectManager(configB));
-        assertNotNull(getMetadataManager(configB));
-        assertNotNull(getMetadataManager(configB));
-
-        assertTrue(!getCubeManager(configA).equals(getCubeManager(configB)));
-        assertTrue(!getCubeDescManager(configA).equals(getCubeDescManager(configB)));
-        assertTrue(!getProjectManager(configA).equals(getProjectManager(configB)));
-        assertTrue(!getMetadataManager(configA).equals(getMetadataManager(configB)));
-
-        assertEquals(getProjectManager(configA).listAllProjects().size(), getProjectManager(configB).listAllProjects().size());
-    }
-
-    @Test
-    public void testCubeCRUD() throws Exception {
-        final Broadcaster broadcaster = Broadcaster.getInstance(configA);
-        broadcaster.getCounterAndClear();
-
-        getStore().deleteResource("/cube/a_whole_new_cube.json");
-
-        //create cube
-
-        final String cubeName = "a_whole_new_cube";
-        final CubeManager cubeManager = getCubeManager(configA);
-        final CubeManager cubeManagerB = getCubeManager(configB);
-        final ProjectManager projectManager = getProjectManager(configA);
-        final ProjectManager projectManagerB = getProjectManager(configB);
-        final CubeDescManager cubeDescManager = getCubeDescManager(configA);
-        final CubeDescManager cubeDescManagerB = getCubeDescManager(configB);
-        final CubeDesc cubeDesc = getCubeDescManager(configA).getCubeDesc("test_kylin_cube_with_slr_desc");
-
-        assertTrue(cubeManager.getCube(cubeName) == null);
-        assertTrue(cubeManagerB.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-        cubeManager.createCube(cubeName, ProjectInstance.DEFAULT_PROJECT_NAME, cubeDesc, null);
-        //one for cube update, one for project update
-        assertEquals(2, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(2);
-
-        assertNotNull(cubeManager.getCube(cubeName));
-        assertNotNull(cubeManagerB.getCube(cubeName));
-        assertTrue(containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-        assertTrue(containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-
-        //update cube
-        CubeInstance cube = cubeManager.getCube(cubeName);
-        assertEquals(0, cube.getSegments().size());
-        assertEquals(0, cubeManagerB.getCube(cubeName).getSegments().size());
-        CubeSegment segment = new CubeSegment();
-        segment.setName("test_segment");
-        CubeUpdate cubeBuilder = new CubeUpdate(cube);
-        cubeBuilder.setToAddSegs(segment);
-        cube = cubeManager.updateCube(cubeBuilder);
-        //one for cube update
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertEquals(1, cubeManagerB.getCube(cubeName).getSegments().size());
-        assertEquals(segment.getName(), cubeManagerB.getCube(cubeName).getSegments().get(0).getName());
-
-        //delete cube
-        cubeManager.dropCube(cubeName, false);
-        //one for cube update, one for project update
-        assertEquals(2, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(2);
-
-        assertTrue(cubeManager.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-        assertTrue(cubeManagerB.getCube(cubeName) == null);
-        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
-
-        final String cubeDescName = "test_cube_desc";
-        cubeDesc.setName(cubeDescName);
-        cubeDesc.setLastModified(0);
-        assertTrue(cubeDescManager.getCubeDesc(cubeDescName) == null);
-        assertTrue(cubeDescManagerB.getCubeDesc(cubeDescName) == null);
-        cubeDescManager.createCubeDesc(cubeDesc);
-        //one for add cube desc
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertNotNull(cubeDescManager.getCubeDesc(cubeDescName));
-        assertNotNull(cubeDescManagerB.getCubeDesc(cubeDescName));
-
-        cubeDesc.setNotifyList(Arrays.asList("test@email", "test@email", "test@email"));
-        cubeDescManager.updateCubeDesc(cubeDesc);
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertEquals(cubeDesc.getNotifyList(), cubeDescManagerB.getCubeDesc(cubeDescName).getNotifyList());
-
-        cubeDescManager.removeCubeDesc(cubeDesc);
-        //one for add cube desc
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertTrue(cubeDescManager.getCubeDesc(cubeDescName) == null);
-        assertTrue(cubeDescManagerB.getCubeDesc(cubeDescName) == null);
-
-        getStore().deleteResource("/cube/a_whole_new_cube.json");
-    }
-
-    private TableDesc createTestTableDesc() {
-        TableDesc tableDesc = new TableDesc();
-        tableDesc.setDatabase("TEST_DB");
-        tableDesc.setName("TEST_TABLE");
-        tableDesc.setUuid(UUID.randomUUID().toString());
-        tableDesc.setLastModified(0);
-        return tableDesc;
-    }
-
-    @Test
-    public void testMetaCRUD() throws Exception {
-        final MetadataManager metadataManager = MetadataManager.getInstance(configA);
-        final MetadataManager metadataManagerB = MetadataManager.getInstance(configB);
-        final Broadcaster broadcaster = Broadcaster.getInstance(configA);
-        broadcaster.getCounterAndClear();
-
-        TableDesc tableDesc = createTestTableDesc();
-        assertTrue(metadataManager.getTableDesc(tableDesc.getIdentity()) == null);
-        assertTrue(metadataManagerB.getTableDesc(tableDesc.getIdentity()) == null);
-        metadataManager.saveSourceTable(tableDesc);
-        //only one for table insert
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertNotNull(metadataManager.getTableDesc(tableDesc.getIdentity()));
-        assertNotNull(metadataManagerB.getTableDesc(tableDesc.getIdentity()));
-
-        final String dataModelName = "test_data_model";
-        DataModelDesc dataModelDesc = metadataManager.getDataModelDesc("test_kylin_left_join_model_desc");
-        dataModelDesc.setName(dataModelName);
-        dataModelDesc.setLastModified(0);
-        assertTrue(metadataManager.getDataModelDesc(dataModelName) == null);
-        assertTrue(metadataManagerB.getDataModelDesc(dataModelName) == null);
-
-        dataModelDesc.setName(dataModelName);
-        metadataManager.createDataModelDesc(dataModelDesc, "default", "ADMIN");
-        //one for data model creation, one for project meta update
-        assertEquals(2, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(2);
-        assertEquals(dataModelDesc.getName(), metadataManagerB.getDataModelDesc(dataModelName).getName());
-
-        final LookupDesc[] lookups = dataModelDesc.getLookups();
-        assertTrue(lookups.length > 0);
-        dataModelDesc.setLookups(lookups);
-        metadataManager.updateDataModelDesc(dataModelDesc);
-        //only one for data model update
-        assertEquals(1, broadcaster.getCounterAndClear());
-        waitForCounterAndClear(1);
-        assertEquals(dataModelDesc.getLookups().length, metadataManagerB.getDataModelDesc(dataModelName).getLookups().length);
-
-    }
-
-    private boolean containsRealization(Set<IRealization> realizations, RealizationType type, String name) {
-        for (IRealization realization : realizations) {
-            if (realization.getType() == type && realization.getName().equals(name)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}
+///*
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// * 
+// *     http://www.apache.org/licenses/LICENSE-2.0
+// * 
+// * Unless required by applicable law or agreed to in writing, software
+// * distributed under the License is distributed on an "AS IS" BASIS,
+// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// * See the License for the specific language governing permissions and
+// * limitations under the License.
+//*/
+//
+//package org.apache.kylin.rest.service;
+//
+//import org.apache.kylin.common.KylinConfig;
+//import org.apache.kylin.common.restclient.Broadcaster;
+//import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+//import org.apache.kylin.cube.*;
+//import org.apache.kylin.cube.model.CubeDesc;
+//import org.apache.kylin.metadata.MetadataManager;
+//import org.apache.kylin.metadata.model.DataModelDesc;
+//import org.apache.kylin.metadata.model.LookupDesc;
+//import org.apache.kylin.metadata.model.TableDesc;
+//import org.apache.kylin.metadata.project.ProjectInstance;
+//import org.apache.kylin.metadata.project.ProjectManager;
+//import org.apache.kylin.metadata.realization.IRealization;
+//import org.apache.kylin.metadata.realization.RealizationType;
+//import org.apache.kylin.rest.broadcaster.BroadcasterReceiveServlet;
+//import org.eclipse.jetty.server.Server;
+//import org.eclipse.jetty.servlet.ServletContextHandler;
+//import org.eclipse.jetty.servlet.ServletHolder;
+//import org.junit.*;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.util.Arrays;
+//import java.util.Set;
+//import java.util.UUID;
+//import java.util.concurrent.atomic.AtomicLong;
+//
+//import static org.junit.Assert.*;
+//
+///**
+// */
+//public class CacheServiceTest extends LocalFileMetadataTestCase {
+//
+//    private static Server server;
+//
+//    private static String ZK_ADDRESS = "localhost:2199";
+//
+//    private static KylinConfig configA;
+//    private static KylinConfig configB;
+//
+//    private static final Logger logger = LoggerFactory.getLogger(CacheServiceTest.class);
+//
+//    private static AtomicLong counter = new AtomicLong();
+//
+//    @BeforeClass
+//    public static void beforeClass() throws Exception {
+//        staticCreateTestMetadata();
+//        configA = KylinConfig.getInstanceFromEnv();
+//        configA.setProperty("kylin.rest.servers", "localhost:7070");
+//        configB = KylinConfig.getKylinConfigFromInputStream(KylinConfig.getKylinPropertiesAsInputSteam());
+//        configB.setProperty("kylin.rest.servers", "localhost:7070");
+//        configB.setMetadataUrl("../examples/test_metadata");
+//
+//        server = new Server(7070);
+//        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+//        context.setContextPath("/");
+//        server.setHandler(context);
+//
+//        final CacheService serviceA = new CacheService() {
+//            @Override
+//            public KylinConfig getConfig() {
+//                return configA;
+//            }
+//        };
+//        final CacheService serviceB = new CacheService() {
+//            @Override
+//            public KylinConfig getConfig() {
+//                return configB;
+//            }
+//        };
+//
+//        final CubeService cubeServiceA = new CubeService() {
+//            @Override
+//            public KylinConfig getConfig() {
+//                return configA;
+//            }
+//        };
+//        final CubeService cubeServiceB = new CubeService() {
+//            @Override
+//            public KylinConfig getConfig() {
+//                return configB;
+//            }
+//        };
+//
+//        serviceA.setCubeService(cubeServiceA);
+//        serviceA.initCubeChangeListener();
+//        serviceB.setCubeService(cubeServiceB);
+//        serviceB.initCubeChangeListener();
+//
+//        context.addServlet(new ServletHolder(new BroadcasterReceiveServlet(new BroadcasterReceiveServlet.BroadcasterHandler() {
+//            @Override
+//            public void handle(String type, String name, String event) {
+//
+//                Broadcaster.TYPE wipeType = Broadcaster.TYPE.getType(type);
+//                Broadcaster.EVENT wipeEvent = Broadcaster.EVENT.getEvent(event);
+//                final String log = "wipe cache type: " + wipeType + " event:" + wipeEvent + " name:" + name;
+//                logger.info(log);
+//                try {
+//                    switch (wipeEvent) {
+//                    case CREATE:
+//                    case UPDATE:
+//                        serviceA.rebuildCache(wipeType, name);
+//                        serviceB.rebuildCache(wipeType, name);
+//                        break;
+//                    case DROP:
+//                        serviceA.removeCache(wipeType, name);
+//                        serviceB.removeCache(wipeType, name);
+//                        break;
+//                    default:
+//                        throw new RuntimeException("invalid type:" + wipeEvent);
+//                    }
+//                } finally {
+//                    counter.incrementAndGet();
+//                }
+//            }
+//        })), "/");
+//
+//        server.start();
+//    }
+//
+//    @AfterClass
+//    public static void afterClass() throws Exception {
+//        server.stop();
+//        cleanAfterClass();
+//    }
+//
+//    @Before
+//    public void setUp() throws Exception {
+//        counter.set(0L);
+//        createTestMetadata();
+//    }
+//
+//    @After
+//    public void after() throws Exception {
+//        cleanupTestMetadata();
+//    }
+//
+//    private void waitForCounterAndClear(long count) {
+//        int retryTimes = 0;
+//        while ((!counter.compareAndSet(count, 0L))) {
+//            if (++retryTimes > 30) {
+//                throw new RuntimeException("timeout");
+//            }
+//            try {
+//                Thread.sleep(100L);
+//            } catch (InterruptedException e) {
+//                e.printStackTrace();
+//            }
+//        }
+//    }
+//
+//    private static CubeManager getCubeManager(KylinConfig config) throws Exception {
+//        return CubeManager.getInstance(config);
+//    }
+//
+//    private static ProjectManager getProjectManager(KylinConfig config) throws Exception {
+//        return ProjectManager.getInstance(config);
+//    }
+//
+//    private static CubeDescManager getCubeDescManager(KylinConfig config) throws Exception {
+//        return CubeDescManager.getInstance(config);
+//    }
+//
+//    private static MetadataManager getMetadataManager(KylinConfig config) throws Exception {
+//        return MetadataManager.getInstance(config);
+//    }
+//
+//    @Test
+//    public void testBasic() throws Exception {
+//        assertTrue(!configA.equals(configB));
+//
+//        assertNotNull(getCubeManager(configA));
+//        assertNotNull(getCubeManager(configB));
+//        assertNotNull(getCubeDescManager(configA));
+//        assertNotNull(getCubeDescManager(configB));
+//        assertNotNull(getProjectManager(configB));
+//        assertNotNull(getProjectManager(configB));
+//        assertNotNull(getMetadataManager(configB));
+//        assertNotNull(getMetadataManager(configB));
+//
+//        assertTrue(!getCubeManager(configA).equals(getCubeManager(configB)));
+//        assertTrue(!getCubeDescManager(configA).equals(getCubeDescManager(configB)));
+//        assertTrue(!getProjectManager(configA).equals(getProjectManager(configB)));
+//        assertTrue(!getMetadataManager(configA).equals(getMetadataManager(configB)));
+//
+//        assertEquals(getProjectManager(configA).listAllProjects().size(), getProjectManager(configB).listAllProjects().size());
+//    }
+//
+//    @Test
+//    public void testCubeCRUD() throws Exception {
+//        final Broadcaster broadcaster = Broadcaster.getInstance(configA);
+//        broadcaster.getCounterAndClear();
+//
+//        getStore().deleteResource("/cube/a_whole_new_cube.json");
+//
+//        //create cube
+//
+//        final String cubeName = "a_whole_new_cube";
+//        final CubeManager cubeManager = getCubeManager(configA);
+//        final CubeManager cubeManagerB = getCubeManager(configB);
+//        final ProjectManager projectManager = getProjectManager(configA);
+//        final ProjectManager projectManagerB = getProjectManager(configB);
+//        final CubeDescManager cubeDescManager = getCubeDescManager(configA);
+//        final CubeDescManager cubeDescManagerB = getCubeDescManager(configB);
+//        final CubeDesc cubeDesc = getCubeDescManager(configA).getCubeDesc("test_kylin_cube_with_slr_desc");
+//
+//        assertTrue(cubeManager.getCube(cubeName) == null);
+//        assertTrue(cubeManagerB.getCube(cubeName) == null);
+//        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//        cubeManager.createCube(cubeName, ProjectInstance.DEFAULT_PROJECT_NAME, cubeDesc, null);
+//        //one for cube update, one for project update
+//        assertEquals(2, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(2);
+//
+//        assertNotNull(cubeManager.getCube(cubeName));
+//        assertNotNull(cubeManagerB.getCube(cubeName));
+//        assertTrue(containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//        assertTrue(containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//
+//        //update cube
+//        CubeInstance cube = cubeManager.getCube(cubeName);
+//        assertEquals(0, cube.getSegments().size());
+//        assertEquals(0, cubeManagerB.getCube(cubeName).getSegments().size());
+//        CubeSegment segment = new CubeSegment();
+//        segment.setName("test_segment");
+//        CubeUpdate cubeBuilder = new CubeUpdate(cube);
+//        cubeBuilder.setToAddSegs(segment);
+//        cube = cubeManager.updateCube(cubeBuilder);
+//        //one for cube update
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertEquals(1, cubeManagerB.getCube(cubeName).getSegments().size());
+//        assertEquals(segment.getName(), cubeManagerB.getCube(cubeName).getSegments().get(0).getName());
+//
+//        //delete cube
+//        cubeManager.dropCube(cubeName, false);
+//        //one for cube update, one for project update
+//        assertEquals(2, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(2);
+//
+//        assertTrue(cubeManager.getCube(cubeName) == null);
+//        assertTrue(!containsRealization(projectManager.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//        assertTrue(cubeManagerB.getCube(cubeName) == null);
+//        assertTrue(!containsRealization(projectManagerB.listAllRealizations(ProjectInstance.DEFAULT_PROJECT_NAME), RealizationType.CUBE, cubeName));
+//
+//        final String cubeDescName = "test_cube_desc";
+//        cubeDesc.setName(cubeDescName);
+//        cubeDesc.setLastModified(0);
+//        assertTrue(cubeDescManager.getCubeDesc(cubeDescName) == null);
+//        assertTrue(cubeDescManagerB.getCubeDesc(cubeDescName) == null);
+//        cubeDescManager.createCubeDesc(cubeDesc);
+//        //one for add cube desc
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertNotNull(cubeDescManager.getCubeDesc(cubeDescName));
+//        assertNotNull(cubeDescManagerB.getCubeDesc(cubeDescName));
+//
+//        cubeDesc.setNotifyList(Arrays.asList("test@email", "test@email", "test@email"));
+//        cubeDescManager.updateCubeDesc(cubeDesc);
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertEquals(cubeDesc.getNotifyList(), cubeDescManagerB.getCubeDesc(cubeDescName).getNotifyList());
+//
+//        cubeDescManager.removeCubeDesc(cubeDesc);
+//        //one for add cube desc
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertTrue(cubeDescManager.getCubeDesc(cubeDescName) == null);
+//        assertTrue(cubeDescManagerB.getCubeDesc(cubeDescName) == null);
+//
+//        getStore().deleteResource("/cube/a_whole_new_cube.json");
+//    }
+//
+//    private TableDesc createTestTableDesc() {
+//        TableDesc tableDesc = new TableDesc();
+//        tableDesc.setDatabase("TEST_DB");
+//        tableDesc.setName("TEST_TABLE");
+//        tableDesc.setUuid(UUID.randomUUID().toString());
+//        tableDesc.setLastModified(0);
+//        return tableDesc;
+//    }
+//
+//    @Test
+//    public void testMetaCRUD() throws Exception {
+//        final MetadataManager metadataManager = MetadataManager.getInstance(configA);
+//        final MetadataManager metadataManagerB = MetadataManager.getInstance(configB);
+//        final Broadcaster broadcaster = Broadcaster.getInstance(configA);
+//        broadcaster.getCounterAndClear();
+//
+//        TableDesc tableDesc = createTestTableDesc();
+//        assertTrue(metadataManager.getTableDesc(tableDesc.getIdentity()) == null);
+//        assertTrue(metadataManagerB.getTableDesc(tableDesc.getIdentity()) == null);
+//        metadataManager.saveSourceTable(tableDesc);
+//        //only one for table insert
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertNotNull(metadataManager.getTableDesc(tableDesc.getIdentity()));
+//        assertNotNull(metadataManagerB.getTableDesc(tableDesc.getIdentity()));
+//
+//        final String dataModelName = "test_data_model";
+//        DataModelDesc dataModelDesc = metadataManager.getDataModelDesc("test_kylin_left_join_model_desc");
+//        dataModelDesc.setName(dataModelName);
+//        dataModelDesc.setLastModified(0);
+//        assertTrue(metadataManager.getDataModelDesc(dataModelName) == null);
+//        assertTrue(metadataManagerB.getDataModelDesc(dataModelName) == null);
+//
+//        dataModelDesc.setName(dataModelName);
+//        metadataManager.createDataModelDesc(dataModelDesc, "default", "ADMIN");
+//        //one for data model creation, one for project meta update
+//        assertEquals(2, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(2);
+//        assertEquals(dataModelDesc.getName(), metadataManagerB.getDataModelDesc(dataModelName).getName());
+//
+//        final LookupDesc[] lookups = dataModelDesc.getLookups();
+//        assertTrue(lookups.length > 0);
+//        dataModelDesc.setLookups(lookups);
+//        metadataManager.updateDataModelDesc(dataModelDesc);
+//        //only one for data model update
+//        assertEquals(1, broadcaster.getCounterAndClear());
+//        waitForCounterAndClear(1);
+//        assertEquals(dataModelDesc.getLookups().length, metadataManagerB.getDataModelDesc(dataModelName).getLookups().length);
+//
+//    }
+//
+//    private boolean containsRealization(Set<IRealization> realizations, RealizationType type, String name) {
+//        for (IRealization realization : realizations) {
+//            if (realization.getType() == type && realization.getName().equals(name)) {
+//                return true;
+//            }
+//        }
+//        return false;
+//    }
+//
+//}

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
index f8dc945..ca4fe39 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
@@ -18,6 +18,12 @@
 
 package org.apache.kylin.rest.service;
 
+import com.google.common.collect.Lists;
+import org.I0Itec.zkclient.IDefaultNameSpace;
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.ZkServer;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.cube.CubeDescManager;
 import org.apache.kylin.cube.CubeManager;
@@ -26,42 +32,42 @@ import org.apache.kylin.invertedindex.IIManager;
 import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.realization.RealizationRegistry;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
+import org.junit.*;
 import org.junit.runner.RunWith;
 import org.springframework.security.authentication.TestingAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.core.userdetails.User;
+import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.test.context.ActiveProfiles;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
 
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+
 /**
  * @author xduo
  */
 @RunWith(SpringJUnit4ClassRunner.class)
 @ContextConfiguration(locations = { "classpath:applicationContext.xml", "classpath:kylinSecurity.xml" })
 @ActiveProfiles("testing")
-public class ServiceTestBase extends LocalFileMetadataTestCase {
-
-    @BeforeClass
-    public static void setupResource() throws Exception {
-        staticCreateTestMetadata();
-        Authentication authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", "ROLE_ADMIN");
-        SecurityContextHolder.getContext().setAuthentication(authentication);
-    }
-
-    @AfterClass
-    public static void tearDownResource() {
-    }
+public class ServiceTestBase extends TestBaseWithZookeeper {
 
     @Before
     public void setup() throws Exception {
         this.createTestMetadata();
 
+        UserService.UserGrantedAuthority userGrantedAuthority = new UserService.UserGrantedAuthority();
+        userGrantedAuthority.setAuthority("ROLE_ADMIN");
+        UserDetails user = new User("ADMIN", "skippped-ldap", Lists.newArrayList(userGrantedAuthority));
+        Authentication authentication = new TestingAuthenticationToken(user, "ADMIN", "ROLE_ADMIN");
+        SecurityContextHolder.getContext().setAuthentication(authentication);
+        KylinConfig kylinConfig = this.getTestConfig();
+        kylinConfig.setRestAddress("localhost:7070");
+
         MetadataManager.clearCache();
         CubeDescManager.clearCache();
         CubeManager.clearCache();


[23/43] kylin git commit: KYLIN-1311 Stream cubing auto assignment and load balance

Posted by sh...@apache.org.
KYLIN-1311 Stream cubing auto assignment and load balance


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6dd49255
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6dd49255
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6dd49255

Branch: refs/heads/helix-rebase
Commit: 6dd49255a2989bc6543539a1038541cd9b0bc860
Parents: d044945
Author: shaofengshi <sh...@apache.org>
Authored: Sat Feb 6 11:49:59 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 build/bin/streaming_check.sh                    |  13 ++-
 build/bin/streaming_fillgap.sh                  |   1 -
 build/conf/kylin.properties                     |   6 +-
 .../apache/kylin/common/KylinConfigBase.java    |   4 +
 .../kylin/engine/streaming/StreamingConfig.java |  33 ++++++
 .../engine/streaming/cli/StreamingCLI.java      |   2 +-
 .../streaming/monitor/StreamingMonitor.java     |  11 +-
 .../rest/controller/ClusterController.java      |  55 +++++++---
 .../rest/controller/StreamingController.java    |  52 ++++++++-
 .../kylin/rest/helix/HelixClusterAdmin.java     |  69 +++++++++---
 .../helix/StreamCubeBuildTransitionHandler.java | 105 ++++++++++++++-----
 .../rest/request/StreamingBuildRequest.java     |  13 +--
 .../kylin/rest/service/StreamingService.java    |  27 +++--
 13 files changed, 299 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/build/bin/streaming_check.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_check.sh b/build/bin/streaming_check.sh
index fef0139..4c5431a 100644
--- a/build/bin/streaming_check.sh
+++ b/build/bin/streaming_check.sh
@@ -20,10 +20,9 @@
 source /etc/profile
 source ~/.bash_profile
 
-receivers=$1
-host=$2
-tablename=$3
-authorization=$4
-projectname=$5
-cubename=$6
-sh ${KYLIN_HOME}/bin/kylin.sh monitor -receivers ${receivers} -host ${host} -tableName ${tablename} -authorization ${authorization} -cubeName ${cubename} -projectName ${projectname}
\ No newline at end of file
+CUBE_NAME=$1
+AUTHORIZATION=$2
+KYLIN_HOST=$3
+
+cd ${KYLIN_HOME}
+curl --request PUT --header "Authorization: Basic $AUTHORIZATION" --header "Content-Type: application/json" -v ${KYLIN_HOST}/kylin/api/streaming/${CUBE_NAME}/checkgap

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/build/bin/streaming_fillgap.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_fillgap.sh b/build/bin/streaming_fillgap.sh
index 31c4886..fe8c0b5 100644
--- a/build/bin/streaming_fillgap.sh
+++ b/build/bin/streaming_fillgap.sh
@@ -25,5 +25,4 @@ AUTHORIZATION=$2
 KYLIN_HOST=$3
 
 cd ${KYLIN_HOME}
-#sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${streaming} fillgap -streaming ${streaming} -fillGap true -margin ${margin}
 curl --request PUT --header "Authorization: Basic $AUTHORIZATION" --header "Content-Type: application/json" -v ${KYLIN_HOST}/kylin/api/streaming/${CUBE_NAME}/fillgap

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 558c2f0..5a0b776 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -2,12 +2,12 @@
 # Whether this kylin run as an instance of a cluster
 kylin.cluster.enabled=false
 
-# Comma separated list of zk servers; 
-# Optional; if absent, will use HBase zookeeper; set if use a different zk;
+# Comma separated list of zk servers, for cluster coordination; 
+# Optional; if absent, will use HBase zookeeper; set it if use a different zk;
 kylin.zookeeper.address=
 
 # REST address of this instance, need be accessible from other instances;
-# optional, default be <hostname>:7070
+# optional, default be <hostname_fqdn>:<port>
 kylin.rest.address=
 
 # whether run a cluster controller in this instance; a robust cluster need at least 3 controllers.

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 6f535f2..cee92d5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -549,6 +549,10 @@ public class KylinConfigBase implements Serializable {
     public String getClusterName() {
         return this.getOptional("kylin.cluster.name", getMetadataUrlPrefix());
     }
+    
+    public int getClusterMaxPartitionPerRegion() {
+        return Integer.parseInt(getOptional("kylin.cluster.max.partition.per.resource", "100"));
+    }
 
     public void setClusterName(String clusterName) {
         setProperty("kylin.cluster.name", clusterName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
index f0a7ab1..ee9aed8 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
@@ -39,6 +39,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.util.List;
 
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -64,6 +65,14 @@ public class StreamingConfig extends RootPersistentEntity {
     @JsonProperty("cubeName")
     private String cubeName;
 
+    @JsonProperty("partitions")
+    private List<String> partitions;
+
+    @JsonProperty("max_gap")
+    private long maxGap = 30 * 60 * 1000l; // 30 minutes
+    @JsonProperty("max_gap_number")
+    private int maxGapNumber = 10; // 10
+    
     public String getCubeName() {
         return cubeName;
     }
@@ -96,6 +105,30 @@ public class StreamingConfig extends RootPersistentEntity {
         return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + streamingName + ".json";
     }
 
+    public List<String> getPartitions() {
+        return partitions;
+    }
+
+    public void setPartitions(List<String> partitions) {
+        this.partitions = partitions;
+    }
+
+    public long getMaxGap() {
+        return maxGap;
+    }
+
+    public void setMaxGap(long maxGap) {
+        this.maxGap = maxGap;
+    }
+
+    public int getMaxGapNumber() {
+        return maxGapNumber;
+    }
+
+    public void setMaxGapNumber(int maxGapNumber) {
+        this.maxGapNumber = maxGapNumber;
+    }
+
     @Override
     public StreamingConfig clone() {
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
index 96ad1ad..88f5e18 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
@@ -82,7 +82,7 @@ public class StreamingCLI {
             }
             if (bootstrapConfig.isFillGap()) {
                 final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(bootstrapConfig.getStreaming());
-                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
+                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
                 logger.info("all gaps:" + StringUtils.join(gaps, ","));
                 for (Pair<Long, Long> gap : gaps) {
                     startOneOffCubeStreaming(bootstrapConfig.getStreaming(), gap.getFirst(), gap.getSecond());

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/monitor/StreamingMonitor.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/monitor/StreamingMonitor.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/monitor/StreamingMonitor.java
index 9609442..9d2bd45 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/monitor/StreamingMonitor.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/monitor/StreamingMonitor.java
@@ -74,7 +74,7 @@ public class StreamingMonitor {
         sendMail(receivers, title, stringBuilder.toString());
     }
 
-    public static final List<Pair<Long, Long>> findGaps(String cubeName) {
+    public static final List<Pair<Long, Long>> findGaps(String cubeName, long maxGapAtOnce) {
         List<CubeSegment> segments = getSortedReadySegments(cubeName);
         List<Pair<Long, Long>> gaps = Lists.newArrayList();
         for (int i = 0; i < segments.size() - 1; ++i) {
@@ -83,7 +83,12 @@ public class StreamingMonitor {
             if (first.getDateRangeEnd() == second.getDateRangeStart()) {
                 continue;
             } else if (first.getDateRangeEnd() < second.getDateRangeStart()) {
-                gaps.add(Pair.newPair(first.getDateRangeEnd(), second.getDateRangeStart()));
+                long start = first.getDateRangeEnd();
+                while (start < second.getDateRangeStart()) {
+                    long end = Math.min(start + maxGapAtOnce, second.getDateRangeStart());
+                    gaps.add(Pair.newPair(start, end));
+                    start = end;
+                }
             }
         }
         return gaps;
@@ -119,7 +124,7 @@ public class StreamingMonitor {
             logger.info("cube:" + cubeName + " does not exist");
             return;
         }
-        List<Pair<Long, Long>> gaps = findGaps(cubeName);
+        List<Pair<Long, Long>> gaps = findGaps(cubeName, Long.MAX_VALUE);
         List<Pair<String, String>> overlaps = Lists.newArrayList();
         StringBuilder content = new StringBuilder();
         if (!gaps.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
index 97fff36..86a0398 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
@@ -19,23 +19,26 @@
 package org.apache.kylin.rest.controller;
 
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.job.JobInstance;
-import org.apache.kylin.job.constant.JobStatusEnum;
-import org.apache.kylin.job.constant.JobTimeFilterEnum;
-import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.helix.HelixClusterAdmin;
-import org.apache.kylin.rest.request.JobListRequest;
-import org.apache.kylin.rest.service.JobService;
+import org.apache.kylin.rest.request.StreamingBuildRequest;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.InitializingBean;
 import org.springframework.stereotype.Controller;
-import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
 
-import java.util.*;
+import java.io.IOException;
+import java.util.Collection;
 
 /**
  * 
@@ -56,15 +59,37 @@ public class ClusterController extends BasicController implements InitializingBe
 
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
 
-        final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
-        clusterAdmin.start();
+        if (kylinConfig.isClusterEnabled()) {
+            final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+            clusterAdmin.start();
 
-        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
-            @Override
-            public void run() {
-                clusterAdmin.stop();
+            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    clusterAdmin.stop();
+                }
+            }));
+        } else {
+            String serverMode = kylinConfig.getServerMode();
+            if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+                logger.info("Initializing Job Engine ....");
+                new Thread(new Runnable() {
+                    @Override
+                    public void run() {
+                        try {
+                            DefaultScheduler scheduler = DefaultScheduler.createInstance();
+                            scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+                            if (!scheduler.hasStarted()) {
+                                logger.error("scheduler has not been started");
+                                System.exit(1);
+                            }
+                        } catch (Exception e) {
+                            throw new RuntimeException(e);
+                        }
+                    }
+                }).start();
             }
-        }));
+        }
 
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index 209c552..e33a1c9 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -24,9 +24,11 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -249,7 +251,12 @@ public class StreamingController extends BasicController {
         }
 
         streamingBuildRequest.setStreaming(streamingConfig.getName());
-        streamingService.buildStream(cube, streamingBuildRequest);
+        try {
+            streamingService.buildStream(cube, streamingBuildRequest);
+        } catch (IOException e) {
+            e.printStackTrace();
+            return streamingBuildRequest;
+        }
         streamingBuildRequest.setMessage("Build request is submitted successfully.");
         streamingBuildRequest.setSuccessful(true);
         return streamingBuildRequest;
@@ -274,13 +281,52 @@ public class StreamingController extends BasicController {
 
         StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest();
         streamingBuildRequest.setStreaming(streamingConfig.getName());
-        streamingService.fillGap(cube);
-        streamingBuildRequest.setMessage("FillGap request is submitted successfully.");
+        List<Pair<Long, Long>> gaps = null;
+        try {
+            gaps = streamingService.fillGap(cube);
+        } catch (IOException e) {
+            logger.error("", e);
+            return streamingBuildRequest;
+        }
+        streamingBuildRequest.setMessage("FillGap request is submitted successfully, gap number: " + gaps.size());
+        streamingBuildRequest.setSuccessful(true);
+        return streamingBuildRequest;
+
+    }
+
+    /**
+     * check wheter gap exists in a cube
+     *
+     * @param cubeName Cube Name
+     * @return
+     * @throws IOException
+     */
+    @RequestMapping(value = "/{cubeName}/checkgap", method = { RequestMethod.PUT })
+    @ResponseBody
+    public StreamingBuildRequest checkGap(@PathVariable String cubeName) {
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
+        List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
+        Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
+        CubeInstance cube = cubes.get(0);
+
+        List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
+        logger.info("all gaps:" + StringUtils.join(gaps, ","));
+        
+        StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest();
+        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        if (gaps.size() > 0) {
+            streamingBuildRequest.setMessage(gaps.size() + " gaps in cube: " + StringUtils.join(gaps, ","));
+        } else {
+            streamingBuildRequest.setMessage("No gap.");
+        }
         streamingBuildRequest.setSuccessful(true);
         return streamingBuildRequest;
 
     }
 
+    
+
     public void setStreamingService(StreamingService streamingService) {
         this.streamingService = streamingService;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 4da9a86..680e371 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -18,11 +18,9 @@
 package org.apache.kylin.rest.helix;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.helix.*;
 import org.apache.helix.api.id.StateModelDefId;
 import org.apache.helix.controller.HelixControllerMain;
@@ -32,11 +30,11 @@ import org.apache.helix.tools.StateModelConfigGenerator;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.common.util.StringUtil;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.StreamingBuildRequest;
 import org.apache.kylin.storage.hbase.HBaseConnection;
-import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
-import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -68,7 +66,7 @@ public class HelixClusterAdmin {
 
     private static final Logger logger = LoggerFactory.getLogger(HelixClusterAdmin.class);
     private final String zkAddress;
-    private final ZKHelixAdmin admin;
+    private final HelixAdmin admin;
     private final String clusterName;
 
     private HelixClusterAdmin(KylinConfig kylinConfig) {
@@ -80,7 +78,7 @@ public class HelixClusterAdmin {
             zkAddress = HBaseConnection.getZKConnectString();
             logger.info("no 'kylin.zookeeper.address' in kylin.properties, use HBase zookeeper " + zkAddress);
         }
-        
+
         this.clusterName = kylinConfig.getClusterName();
         this.admin = new ZKHelixAdmin(zkAddress);
     }
@@ -130,24 +128,59 @@ public class HelixClusterAdmin {
 
     }
 
-    public void addStreamingJob(StreamingBuildRequest streamingBuildRequest) {
+    public void addStreamingJob(StreamingBuildRequest streamingBuildRequest) throws IOException {
         String resourceName = streamingBuildRequest.toResourceName();
-        if (admin.getResourcesInCluster(clusterName).contains(resourceName)) {
-            logger.warn("Resource '" + resourceName + "' already exists in cluster, remove and re-add.");
-            admin.dropResource(clusterName, resourceName);
+        if (!admin.getResourcesInCluster(clusterName).contains(resourceName)) {
+            logger.info("Resource '" + resourceName + "' is new, add it with 0 partitions in cluster.");
+            admin.addResource(clusterName, resourceName, 0, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.FULL_AUTO.name());
         }
-        admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.FULL_AUTO.name());
-        rebalanceWithTag(resourceName, TAG_STREAM_BUILDER);
 
+        IdealState idealState = admin.getResourceIdealState(clusterName, resourceName);
+
+        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
+        List<String> partitions = streamingConfig.getPartitions();
+        if (partitions == null) {
+            partitions = Lists.newArrayList();
+        }
+
+        if (partitions.size() != idealState.getNumPartitions() || idealState.getNumPartitions() >= kylinConfig.getClusterMaxPartitionPerRegion()) {
+            if (partitions.size() != idealState.getNumPartitions()) {
+                logger.error("Cluster resource partition number doesn't match with the partitions in StreamingConfig: " + resourceName);
+            } else {
+                logger.error("Partitions number for resource '" + resourceName + " exceeds the up limit: " + kylinConfig.getClusterMaxPartitionPerRegion());
+            }
+            logger.info("Drop and create resource: " + resourceName);
+            cleanResourcePartitions(resourceName);
+            idealState = admin.getResourceIdealState(clusterName, resourceName);
+            streamingConfig.getPartitions().clear();
+            StreamingManager.getInstance(kylinConfig).updateStreamingConfig(streamingConfig);
+            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
+            partitions = Lists.newArrayList();
+        }
+
+        partitions.add(streamingBuildRequest.toPartitionName());
+        streamingConfig.setPartitions(partitions);
+        StreamingManager.getInstance(kylinConfig).updateStreamingConfig(streamingConfig);
+
+        idealState.setNumPartitions(idealState.getNumPartitions() + 1);
+        admin.setResourceIdealState(clusterName, resourceName, idealState);
+        rebalanceWithTag(resourceName, TAG_STREAM_BUILDER);
     }
 
-    public void dropStreamingJob(String streamingName, long start, long end) {
-        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
-        admin.dropResource(clusterName, resourceName);
+
+    private void cleanResourcePartitions(String resourceName) {
+        IdealState is = admin.getResourceIdealState(clusterName, resourceName);
+        is.getRecord().getListFields().clear();
+        is.getRecord().getMapFields().clear();
+        is.setNumPartitions(0);
+        admin.setResourceIdealState(clusterName, resourceName, is);
+
+        logger.info("clean all partitions in resource: " + resourceName);
     }
 
     /**
      * Start the instance and register the state model factory
+     *
      * @param instanceName
      * @throws Exception
      */
@@ -161,11 +194,11 @@ public class HelixClusterAdmin {
 
     /**
      * Rebalance the resource with the tags
+     *
      * @param tags
      */
     protected void rebalanceWithTag(String resourceName, String tag) {
-        List<String> instances = admin.getInstancesInClusterWithTag(clusterName, tag);
-        admin.rebalance(clusterName, resourceName, instances.size(), "", tag);
+        admin.rebalance(clusterName, resourceName, 2, null, tag);
     }
 
     /**
@@ -206,6 +239,7 @@ public class HelixClusterAdmin {
 
     /**
      * Check whether current kylin instance is in the leader role
+     *
      * @return
      */
     public boolean isLeaderRole(String resourceName) {
@@ -220,6 +254,7 @@ public class HelixClusterAdmin {
 
     /**
      * Add instance to cluster, with a tag list
+     *
      * @param instanceName should be unique in format: hostName_port
      * @param tags
      */

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
index 44d8302..705d8a7 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
@@ -11,6 +11,7 @@ import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.rest.request.StreamingBuildRequest;
 import org.slf4j.Logger;
@@ -43,43 +44,81 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
     @Transition(to = "LEADER", from = "STANDBY")
     public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
         String resourceName = message.getResourceId().stringify();
-        StreamingBuildRequest streamingBuildRequest = StreamingBuildRequest.fromResourceName(resourceName);
+        final StreamingBuildRequest streamingBuildRequest = getStreamingBuildRequest(resourceName, message.getPartitionName());
+        if (streamingBuildRequest != null && isSuccessfullyBuilt(streamingBuildRequest) == false) {
+            KylinConfigBase.getKylinHome();
+            String segmentId = streamingBuildRequest.toPartitionName();
+            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingBuildRequest.getStreaming() + " " + segmentId + " -oneoff true -start " + streamingBuildRequest.getStart() + " -end " + streamingBuildRequest.getEnd() + " -streaming " + streamingBuildRequest.getStreaming();
+            runCMD(cmd);
+        }
+    }
 
-        final String cubeName = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming()).getCubeName();
+    @Transition(to = "STANDBY", from = "LEADER")
+    public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
+        String resourceName = message.getResourceId().stringify();
+        logger.info("Partition " + message.getPartitionId() + " becomes as Standby");
+        /*
+        final StreamingBuildRequest streamingBuildRequest = getStreamingBuildRequest(resourceName, message.getPartitionName());
+        if (isSuccessfullyBuilt(streamingBuildRequest) == false) {
+            KylinConfigBase.getKylinHome();
+            String segmentId = streamingBuildRequest.toPartitionName();
+            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming stop " + streamingBuildRequest.getStreaming() + " " + segmentId;
+            runCMD(cmd);
+        }
+        */
+    }
+
+    private boolean isSuccessfullyBuilt(StreamingBuildRequest streamingBuildRequest) {
+        final StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
+        final String cubeName = streamingConfig.getCubeName();
         final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
         for (CubeSegment segment : cube.getSegments()) {
             if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
-                logger.info("Segment " + segment.getName() + " already exist, no need rebuild.");
-                return;
+                logger.info("Segment " + segment.getName() + " already exist.");
+                return true;
             }
         }
 
-        KylinConfigBase.getKylinHome();
-        String segmentId = streamingBuildRequest.getStart() + "_" + streamingBuildRequest.getEnd();
-        String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingBuildRequest.getStreaming() + " " + segmentId + " -oneoff true -start " + streamingBuildRequest.getStart() + " -end " + streamingBuildRequest.getEnd() + " -streaming " + streamingBuildRequest.getStreaming();
-        logger.info("Executing: " + cmd);
-        try {
-            String line;
-            Process p = Runtime.getRuntime().exec(cmd);
-            BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
-            while ((line = input.readLine()) != null) {
-                logger.info(line);
+        return false;
+    }
+
+    private StreamingBuildRequest getStreamingBuildRequest(String resourceName, String partitionName) {
+        String streamConfigName = resourceName.substring(HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX.length());
+        int partitionId = Integer.parseInt(partitionName.substring(partitionName.lastIndexOf("_") + 1));
+
+        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamConfigName);
+
+        int retry = 0;
+        while ((streamingConfig.getPartitions() == null || streamingConfig.getPartitions().isEmpty() || partitionId > (streamingConfig.getPartitions().size() - 1) && retry < 10)) {
+            logger.error("No segment information in StreamingConfig '" + streamConfigName + "' for partition " + partitionId);
+            logger.error("Wait for 0.5 second...");
+            try {
+                Thread.sleep(500);
+            } catch (InterruptedException e) {
+                logger.error("", e);
             }
-            input.close();
-        } catch (IOException err) {
-            logger.error("Error happens during build streaming  '" + resourceName + "'", err);
-            throw new RuntimeException(err);
+            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamConfigName);
+            retry++;
         }
 
+        if (retry >= 10) {
+            logger.error("No segment information in StreamingConfig '" + streamConfigName + "' for partition " + partitionId);
+            logger.warn("Abor building...");
+            return null;
+        }
+
+        String startEnd = streamingConfig.getPartitions().get(partitionId);
+        long start = Long.parseLong(startEnd.substring(0, startEnd.indexOf("_")));
+        long end = Long.parseLong(startEnd.substring(startEnd.indexOf("_") + 1));
+        StreamingBuildRequest request = new StreamingBuildRequest();
+        request.setStreaming(streamConfigName);
+        request.setStart(start);
+        request.setEnd(end);
+        return request;
+
     }
 
-    @Transition(to = "STANDBY", from = "LEADER")
-    public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
-        String resourceName = message.getResourceId().stringify();
-        StreamingBuildRequest streamingBuildRequest = StreamingBuildRequest.fromResourceName(resourceName);
-        KylinConfigBase.getKylinHome();
-        String segmentId = streamingBuildRequest.getStart() + "_" + streamingBuildRequest.getEnd();
-        String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming stop " + streamingBuildRequest.getStreaming() + " " + segmentId;
+    private void runCMD(String cmd) {
         logger.info("Executing: " + cmd);
         try {
             String line;
@@ -90,9 +129,10 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
             }
             input.close();
         } catch (IOException err) {
-            logger.error("Error happens during build streaming  '" + resourceName + "'", err);
+            logger.error("Error happens when running '" + cmd + "'", err);
             throw new RuntimeException(err);
         }
+
     }
 
     @Transition(to = "STANDBY", from = "OFFLINE")
@@ -104,4 +144,17 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
     public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
 
     }
+
+    @Transition(to = "DROPPED", from = "OFFLINE")
+    public void onBecomeDroppedFromOffline(Message message, NotificationContext context)
+            throws Exception {
+        logger.info("Default OFFLINE->DROPPED transition invoked.");
+    }
+
+    @Transition(to = "OFFLINE", from = "DROPPED")
+    public void onBecomeOfflineFromDropped(Message message, NotificationContext context)
+            throws Exception {
+        logger.info("Default DROPPED->OFFLINE transition invoked.");
+    }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
index dcf91fd..201568e 100644
--- a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
+++ b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
@@ -81,16 +81,9 @@ public class StreamingBuildRequest {
     }
 
     public String toResourceName() {
-        return HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX + streaming + "_" + start + "_" + end;
+        return HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX + streaming;
     }
-
-    public static StreamingBuildRequest fromResourceName(String resourceName) {
-        Preconditions.checkArgument(resourceName.startsWith(RESOURCE_STREAME_CUBE_PREFIX));
-        long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_") + 1));
-        String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
-        long start = Long.parseLong(temp.substring(temp.lastIndexOf("_") + 1));
-        String streamingConfig = temp.substring(0, temp.lastIndexOf("_"));
-
-        return new StreamingBuildRequest(streamingConfig, start, end);
+    public String toPartitionName() {
+        return start + "_" + end;
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6dd49255/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index 7c2cc48..6e732d9 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.rest.service;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -98,20 +100,33 @@ public class StreamingService extends BasicService {
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public void buildStream(CubeInstance cube, StreamingBuildRequest streamingBuildRequest) {
+    public void buildStream(CubeInstance cube, StreamingBuildRequest streamingBuildRequest) throws IOException {
         HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
-        clusterAdmin.addStreamingJob(streamingBuildRequest);
+        try {
+            clusterAdmin.addStreamingJob(streamingBuildRequest);
+        } catch (IOException e) {
+            logger.error("", e);
+            streamingBuildRequest.setSuccessful(false);
+            streamingBuildRequest.setMessage("Failed to submit job for " + streamingBuildRequest.getStreaming());
+        }
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
-    public void fillGap(CubeInstance cube) {
+    public List<Pair<Long, Long>> fillGap(CubeInstance cube) throws IOException {
         HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
         final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfigByCube(cube.getName());
-        final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
-        logger.info("all gaps:" + org.apache.commons.lang3.StringUtils.join(gaps, ","));
-        for (Pair<Long, Long> gap : gaps) {
+        final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
+        logger.info("all gaps:" + StringUtils.join(gaps, ","));
+
+        List<Pair<Long, Long>> filledGap = Lists.newArrayList();
+        int max_gaps_at_one_time = streamingConfig.getMaxGapNumber();
+        for (int i = 0; i < Math.min(gaps.size(), max_gaps_at_one_time); i++) {
+            Pair<Long, Long> gap = gaps.get(i);
             StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest(streamingConfig.getName(), gap.getFirst(), gap.getSecond());
             clusterAdmin.addStreamingJob(streamingBuildRequest);
+            filledGap.add(gap);
         }
+
+        return filledGap;
     }
 }


[34/43] kylin git commit: KYLIN-1387 should support empty segment

Posted by sh...@apache.org.
KYLIN-1387 should support empty segment


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5774a2e0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5774a2e0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5774a2e0

Branch: refs/heads/helix-rebase
Commit: 5774a2e00322640698775ad6615bb955cd98ca28
Parents: 036a75c
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 09:42:34 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../hbase/steps/SequenceFileCuboidWriter.java   | 48 +++++++++++---------
 1 file changed, 26 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5774a2e0/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
index 4d76522..8c2d5e4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -25,35 +25,39 @@ public class SequenceFileCuboidWriter extends KVGTRecordWriter {
 
     public SequenceFileCuboidWriter(CubeDesc cubeDesc, CubeSegment segment) {
         super(cubeDesc, segment);
+        try {
+            initiate();
+        } catch (IOException e) {
+           throw new RuntimeException(e);
+        }
     }
 
-
-    @Override
-    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+    protected void initiate() throws IOException {
         if (writer == null) {
-            synchronized (SequenceFileCuboidWriter.class) {
-                if (writer == null) {
-                    JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
-                    String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
-                    Path cuboidPath = new Path(cuboidRoot);
-                    FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
-                    try {
-                        if (fs.exists(cuboidPath)) {
-                            fs.delete(cuboidPath, true);
-                        }
-
-                        fs.mkdirs(cuboidPath);
-                    } finally {
-                        IOUtils.closeQuietly(fs);
-                    }
-
-                    Path cuboidFile = new Path(cuboidPath, "data.seq");
-                    logger.debug("Cuboid is written to " + cuboidFile);
-                    writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
+            JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
+            String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
+            Path cuboidPath = new Path(cuboidRoot);
+            FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
+            try {
+                if (fs.exists(cuboidPath)) {
+                    fs.delete(cuboidPath, true);
                 }
+
+                fs.mkdirs(cuboidPath);
+            } finally {
+                IOUtils.closeQuietly(fs);
             }
+
+            Path cuboidFile = new Path(cuboidPath, "data.seq");
+            logger.debug("Cuboid is written to " + cuboidFile);
+            writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
         }
 
+    }
+
+    @Override
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+       
         Text outputValue = new Text();
         Text outputKey = new Text();
         outputKey.set(key.array(), key.offset(), key.length());


[08/43] kylin git commit: KYLIN-1458: Checking the consistency of cube segment host with the environment after cube migration

Posted by sh...@apache.org.
KYLIN-1458: Checking the consistency of cube segment host with the environment after cube migration

Signed-off-by: Hongbin Ma <ma...@apache.org>

KYLIN-1458: Add the auto fix if inconsistency exists after cube migration

Signed-off-by: Hongbin Ma <ma...@apache.org>

KYLIN-1458: rename parameters for avoiding misleading

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ca597955
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ca597955
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ca597955

Branch: refs/heads/helix-rebase
Commit: ca597955477650a388f882b968e25499b02a4bf0
Parents: 5474fe4
Author: yangzhong <ya...@ebay.com>
Authored: Mon Feb 29 09:37:06 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 13:38:02 2016 +0800

----------------------------------------------------------------------
 .../storage/hbase/util/CubeMigrationCLI.java    |   7 +
 .../hbase/util/CubeMigrationCheckCLI.java       | 195 +++++++++++++++++++
 2 files changed, 202 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ca597955/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index caf79b2..09aab48 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -141,6 +141,8 @@ public class CubeMigrationCLI {
         } else {
             showOpts();
         }
+
+        checkMigrationSuccess(dstConfig, cubeName, true);
     }
 
     public static void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
@@ -148,6 +150,11 @@ public class CubeMigrationCLI {
         moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
+    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException{
+        CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
+        checkCLI.execute(cubeName);
+    }
+
     private static String checkAndGetHbaseUrl() {
         String srcMetadataUrl = srcConfig.getMetadataUrl();
         String dstMetadataUrl = dstConfig.getMetadataUrl();

http://git-wip-us.apache.org/repos/asf/kylin/blob/ca597955/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
new file mode 100644
index 0000000..2762561
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -0,0 +1,195 @@
+package org.apache.kylin.storage.hbase.util;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * <p/>
+ * This tool serves for the purpose of
+ * checking the "KYLIN_HOST" property to be consistent with the dst's MetadataUrlPrefix
+ * for all of cube segments' corresponding HTables after migrating a cube
+ * <p/>
+ */
+public class CubeMigrationCheckCLI {
+
+    private static final Logger logger = LoggerFactory.getLogger(CubeMigrationCheckCLI.class);
+
+    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false).withDescription("Fix the inconsistent cube segments' HOST").create("fix");
+
+    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false).withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
+
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
+
+
+    private KylinConfig dstCfg;
+    private HBaseAdmin hbaseAdmin;
+
+    private List<String> issueExistHTables;
+    private List<String> inconsistentHTables;
+    
+    private boolean ifFix = false;
+
+    public static void main(String[] args) throws ParseException, IOException {
+
+        OptionsHelper optionsHelper = new OptionsHelper();
+
+        Options options = new Options();
+        options.addOption(OPTION_FIX);
+        options.addOption(OPTION_DST_CFG_URI);
+        options.addOption(OPTION_CUBE);
+
+        boolean ifFix = false;
+        String dstCfgUri;
+        String cubeName;
+        logger.info("jobs args: " + Arrays.toString(args));
+        try {
+
+            optionsHelper.parseOptions(options, args);
+
+            logger.info("options: '" + options.toString() + "'");
+            logger.info("option value 'fix': '" + optionsHelper.getOptionValue(OPTION_FIX) + "'");
+            ifFix = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_FIX));
+
+            logger.info("option value 'dstCfgUri': '" + optionsHelper.getOptionValue(OPTION_DST_CFG_URI) + "'");
+            dstCfgUri = optionsHelper.getOptionValue(OPTION_DST_CFG_URI);
+
+            logger.info("option value 'cube': '" + optionsHelper.getOptionValue(OPTION_CUBE) + "'");
+            cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
+
+        } catch (ParseException e) {
+            optionsHelper.printUsage(CubeMigrationCheckCLI.class.getName(), options);
+            throw e;
+        }
+
+        KylinConfig kylinConfig;
+        if(dstCfgUri==null){
+            kylinConfig = KylinConfig.getInstanceFromEnv();
+        }else{
+            kylinConfig = KylinConfig.createInstanceFromUri(dstCfgUri);
+        }
+
+        CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
+        checkCLI.execute(cubeName);
+    }
+
+    public void execute() throws IOException{
+        execute(null);
+    }
+
+    public void execute(String cubeName) throws IOException{
+        if(cubeName==null){
+            checkAll();
+        }else {
+            checkCube(cubeName);
+        }
+        fixInconsistent();
+        printIssueExistingHTables();
+    }
+
+    public CubeMigrationCheckCLI(KylinConfig kylinConfig, Boolean isFix) throws IOException{
+        this.dstCfg = kylinConfig;
+        this.ifFix = isFix;
+
+        Configuration conf = HBaseConfiguration.create();
+        hbaseAdmin = new HBaseAdmin(conf);
+
+        issueExistHTables = Lists.newArrayList();
+        inconsistentHTables = Lists.newArrayList();
+    }
+
+    public void checkCube(String cubeName) {
+        List<String> segFullNameList = Lists.newArrayList();
+
+        CubeInstance cube = CubeManager.getInstance(dstCfg).getCube(cubeName);
+        addHTableNamesForCube(cube,segFullNameList);
+
+        check(segFullNameList);
+    }
+
+    public void checkAll(){
+        List<String> segFullNameList = Lists.newArrayList();
+
+        CubeManager cubeMgr = CubeManager.getInstance(dstCfg);
+        for (CubeInstance cube : cubeMgr.listAllCubes()) {
+            addHTableNamesForCube(cube, segFullNameList);
+        }
+
+        check(segFullNameList);
+    }
+
+    public void addHTableNamesForCube(CubeInstance cube, List<String> segFullNameList){
+        for (CubeSegment seg : cube.getSegments()) {
+            String tableName = seg.getStorageLocationIdentifier();
+            segFullNameList.add(tableName+","+cube.getName());
+        }
+    }
+
+    public void check(List<String> segFullNameList){
+        issueExistHTables = Lists.newArrayList();
+        inconsistentHTables = Lists.newArrayList();
+
+        for(String segFullName:segFullNameList){
+            String[] sepNameList = segFullName.split(",");
+            try {
+                HTableDescriptor hTableDescriptor = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
+                String host = hTableDescriptor.getValue(IRealizationConstants.HTableTag);
+                if(!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)){
+                    inconsistentHTables.add(segFullName);
+                }
+            }catch (IOException e){
+                issueExistHTables.add(segFullName);
+                continue;
+            }
+        }
+    }
+
+    public void fixInconsistent() throws IOException{
+        if(ifFix == true){
+            for(String segFullName : inconsistentHTables){
+                String[] sepNameList = segFullName.split(",");
+                HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
+                logger.info("Change the host of htable "+sepNameList[0]+"belonging to cube "+sepNameList[1]+" from "+desc.getValue(IRealizationConstants.HTableTag)+" to "+dstCfg.getMetadataUrlPrefix());
+                hbaseAdmin.disableTable(sepNameList[0]);
+                desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
+                hbaseAdmin.modifyTable(sepNameList[0], desc);
+                hbaseAdmin.enableTable(sepNameList[0]);
+            }
+        }else{
+            logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
+            for (String hTable : inconsistentHTables) {
+                String[] sepNameList = hTable.split(",");
+                logger.info(sepNameList[0]+" belonging to cube "+sepNameList[1]);
+            }
+            logger.info("----------------------------------------------------");
+        }
+    }
+
+    public void printIssueExistingHTables(){
+        logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
+        for(String segFullName : issueExistHTables){
+            String[] sepNameList = segFullName.split(",");
+            logger.error(sepNameList[0]+" belonging to cube "+sepNameList[1]+" has some issues and cannot be read successfully!!!");
+        }
+        logger.info("----------------------------------------------------");
+    }
+}


[31/43] kylin git commit: KYLIN-1311 fix small bug

Posted by sh...@apache.org.
KYLIN-1311 fix small bug


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b5ee2df7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b5ee2df7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b5ee2df7

Branch: refs/heads/helix-rebase
Commit: b5ee2df7a0d2e674f06deebeab6e42defdbd82ae
Parents: c38efff
Author: shaofengshi <sh...@apache.org>
Authored: Fri Jan 15 17:57:26 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../engine/streaming/StreamingManager.java      | 11 +++++-----
 .../rest/controller/StreamingController.java    | 13 ++++++------
 .../helix/LeaderStandbyStateModelFactory.java   | 21 +++++++++++++++++---
 3 files changed, 30 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b5ee2df7/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
index e0b086d..5c1c11e 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
@@ -108,6 +108,12 @@ public class StreamingManager {
         return streamingMap.get(name);
     }
 
+    public StreamingConfig getStreamingConfigByCube(String cubeName) {
+        String streamingConfig = cubeName + "_streaming";
+        return getStreamingConfig(streamingConfig);
+    }
+
+
     public List<StreamingConfig> listAllStreaming() {
         return new ArrayList<>(streamingMap.values());
     }
@@ -139,11 +145,6 @@ public class StreamingManager {
         streamingMap.remove(streamingConfig.getName());
     }
 
-    public StreamingConfig getConfig(String name) {
-        name = name.toUpperCase();
-        return streamingMap.get(name);
-    }
-
     public void removeStreamingLocal(String streamingName) {
         streamingMap.removeLocal(streamingName);
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b5ee2df7/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index 57831d5..fb806d1 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -236,13 +236,11 @@ public class StreamingController extends BasicController {
      * @return
      * @throws IOException
      */
-    @RequestMapping(value = "/{streamingName}/build", method = {RequestMethod.PUT})
+    @RequestMapping(value = "/{cubeName}/build", method = {RequestMethod.PUT})
     @ResponseBody
-    public StreamingBuildRequest buildStream(@PathVariable String streamingName, @RequestBody StreamingBuildRequest streamingBuildRequest) {
-        streamingBuildRequest.setStreaming(streamingName);
-        StreamingConfig streamingConfig = streamingService.getStreamingManager().getConfig(streamingName);
-        Preconditions.checkNotNull(streamingConfig, "Stream config '" + streamingName + "' is not found.");
-        String cubeName = streamingConfig.getCubeName();
+    public StreamingBuildRequest buildStream(@PathVariable String cubeName, @RequestBody StreamingBuildRequest streamingBuildRequest) {
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
         List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
         Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
         CubeInstance cube = cubes.get(0);
@@ -257,7 +255,8 @@ public class StreamingController extends BasicController {
             }
         }
 
-        streamingService.buildStream(streamingName, streamingBuildRequest);
+        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        streamingService.buildStream(cubeName, streamingBuildRequest);
         streamingBuildRequest.setMessage("Build request is submitted successfully.");
         streamingBuildRequest.setSuccessful(true);
         return streamingBuildRequest;

http://git-wip-us.apache.org/repos/asf/kylin/blob/b5ee2df7/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
index df23ea0..8614e8c 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
@@ -10,6 +10,10 @@ import org.apache.helix.model.Message;
 import org.apache.helix.participant.statemachine.Transition;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigBase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.lock.MockJobLock;
@@ -48,7 +52,7 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
         public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
             logger.info("JobEngineStateModel.onBecomeLeaderFromStandby()");
             try {
-                KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+                final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
                 DefaultScheduler scheduler = DefaultScheduler.createInstance();
                 scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
                 while (!scheduler.hasStarted()) {
@@ -89,11 +93,22 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
         public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
             String resourceName = message.getResourceId().stringify();
             Preconditions.checkArgument(resourceName.startsWith(RESOURCE_STREAME_CUBE_PREFIX));
-            long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_")) + 1);
+            long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_") + 1));
             String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
-            long start = Long.parseLong(temp.substring(temp.lastIndexOf("_")) + 1);
+            long start = Long.parseLong(temp.substring(temp.lastIndexOf("_") + 1));
             String streamingConfig = temp.substring(0, temp.lastIndexOf("_"));
 
+            final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+            
+            final String cubeName = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingConfig).getCubeName();
+            final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+            for (CubeSegment segment : cube.getSegments()) {
+                if (segment.getDateRangeStart() <= start && segment.getDateRangeEnd() >= end) {
+                    logger.info("Segment " + segment.getName() + " already exist, no need rebuild.");
+                    return;
+                }
+            }
+            
             KylinConfigBase.getKylinHome();
             String segmentId = start + "_" + end;
             String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingConfig + " " + segmentId + " -oneoff true -start " + start + " -end " + end + " -streaming " + streamingConfig;


[28/43] kylin git commit: remove getClusterName() from KylinConfig.java

Posted by sh...@apache.org.
remove getClusterName() from KylinConfig.java


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/436db28b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/436db28b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/436db28b

Branch: refs/heads/helix-rebase
Commit: 436db28b78845971f130e960d201f73acd2eff40
Parents: 0d7f9e6
Author: shaofengshi <sh...@apache.org>
Authored: Wed Dec 30 16:09:32 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/common/KylinConfig.java       | 4 ----
 1 file changed, 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/436db28b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index ea77e47..81f5827 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -264,9 +264,5 @@ public class KylinConfig extends KylinConfigBase {
             out.println(key + "=" + val);
         }
     }
-    
-    public String getClusterName() {
-        return this.getOptional("kylin.cluster.name", getMetadataUrlPrefix());
-    }
 
 }


[32/43] kylin git commit: KYLIN-1311 fix unit tests after rebase

Posted by sh...@apache.org.
KYLIN-1311 fix unit tests after rebase


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c38efff5
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c38efff5
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c38efff5

Branch: refs/heads/helix-rebase
Commit: c38efff5a38e2782cc8787e31466e1cc2976429e
Parents: ab60480
Author: shaofengshi <sh...@apache.org>
Authored: Fri Jan 15 14:44:27 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |  15 +-
 .../apache/kylin/common/KylinConfigBase.java    |   6 +-
 .../job/impl/threadpool/DefaultScheduler.java   |  27 +-
 .../job/impl/threadpool/BaseSchedulerTest.java  |   2 +-
 .../test_case_data/sandbox/kylin.properties     |  10 +-
 .../kylin/provision/BuildCubeWithEngine.java    |   2 +-
 .../kylin/provision/BuildCubeWithSpark.java     |   2 +-
 .../kylin/provision/BuildIIWithEngine.java      |   2 +-
 pom.xml                                         |  14 +-
 server/pom.xml                                  |  32 +++
 .../java/org/apache/kylin/rest/DebugTomcat.java |   4 +-
 .../kylin/rest/controller/JobController.java    |  50 ++--
 .../kylin/rest/helix/HelixClusterAdmin.java     |  25 +-
 .../apache/kylin/rest/service/CubeService.java  |   7 +-
 .../rest/controller/JobControllerTest.java      | 245 ++++++++++---------
 .../kylin/rest/helix/HelixClusterAdminTest.java |   4 +-
 .../kylin/rest/service/CacheServiceTest.java    |  18 --
 .../kylin/storage/hbase/HBaseConnection.java    |  17 ++
 .../storage/hbase/util/ZookeeperJobLock.java    |  25 +-
 19 files changed, 289 insertions(+), 218 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index bbfa7c8..558c2f0 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -1,12 +1,16 @@
 ## Cluster related properties ##
-# Required, comma separated list of zk servers; 
+# Whether this kylin run as an instance of a cluster
+kylin.cluster.enabled=false
+
+# Comma separated list of zk servers; 
+# Optional; if absent, will use HBase zookeeper; set if use a different zk;
 kylin.zookeeper.address=
 
-# rest address of this instance, ;
+# REST address of this instance, need be accessible from other instances;
 # optional, default be <hostname>:7070
 kylin.rest.address=
 
-# whether run a cluster controller in this node
+# whether run a cluster controller in this instance; a robust cluster need at least 3 controllers.
 kylin.cluster.controller=true
 
 # optional information for the owner of kylin platform, it can be your team's email
@@ -14,10 +18,11 @@ kylin.cluster.controller=true
 kylin.owner=whoami@kylin.apache.org
 
 # List of web servers in use, this enables one web server instance to sync up with other servers.
-# Deprecated, cluster will self-discover and update this.
+# Deprecated, cluster will self-discover and update this property automatically.
 # kylin.rest.servers=localhost:7070
 
-# Server mode: all, job, query
+# Server mode: all, job, query, stream.
+# The role of this instance; 
 kylin.server.mode=all
 
 # The metadata store in hbase

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 6d3ac0d..6f535f2 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -553,13 +553,17 @@ public class KylinConfigBase implements Serializable {
     public void setClusterName(String clusterName) {
         setProperty("kylin.cluster.name", clusterName);
     }
+
+    public boolean isClusterEnabled() {
+        return Boolean.parseBoolean(getOptional("kylin.cluster.enabled", "false"));
+    }
     
     public boolean isClusterController() {
         return Boolean.parseBoolean(getOptional("kylin.cluster.controller", "true"));
     }
     
     public String getRestAddress() {
-        return this.getOptional("kylin.rest.address");
+        return this.getOptional("kylin.rest.address", "localhost:7070");
     }
 
     public void setRestAddress(String restAddress) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
index 2915c60..61936a5 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
@@ -55,12 +55,12 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
     private ExecutorService jobPool;
     private DefaultContext context;
 
-    private Logger logger = LoggerFactory.getLogger(DefaultScheduler.class);
+    private static final Logger logger = LoggerFactory.getLogger(DefaultScheduler.class);
     private volatile boolean initialized = false;
     private volatile boolean hasStarted = false;
     private JobEngineConfig jobEngineConfig;
 
-    private static final DefaultScheduler INSTANCE = new DefaultScheduler();
+    private static DefaultScheduler INSTANCE;
 
     private DefaultScheduler() {
     }
@@ -134,10 +134,6 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         }
     }
 
-    public static DefaultScheduler getInstance() {
-        return INSTANCE;
-    }
-
     @Override
     public void stateChanged(CuratorFramework client, ConnectionState newState) {
         if ((newState == ConnectionState.SUSPENDED) || (newState == ConnectionState.LOST)) {
@@ -149,6 +145,25 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         }
     }
 
+    public synchronized static DefaultScheduler createInstance() {
+        destroyInstance();
+        INSTANCE = new DefaultScheduler();
+        return INSTANCE;
+    }
+
+    public synchronized static void destroyInstance() {
+        DefaultScheduler tmp = INSTANCE;
+        INSTANCE = null;
+        if (tmp != null) {
+            try {
+                tmp.shutdown();
+            } catch (SchedulerException e) {
+                logger.error("error stop DefaultScheduler", e);
+                throw new RuntimeException(e);
+            }
+        }
+    }
+
     @Override
     public synchronized void init(JobEngineConfig jobEngineConfig, final JobLock jobLock) throws SchedulerException {
         if (!initialized) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
----------------------------------------------------------------------
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
index ecac973..4e092a1 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/BaseSchedulerTest.java
@@ -45,7 +45,7 @@ public abstract class BaseSchedulerTest extends LocalFileMetadataTestCase {
         createTestMetadata();
         setFinalStatic(ExecutableConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10);
         jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
-        scheduler = DefaultScheduler.getInstance();
+        scheduler = DefaultScheduler.createInstance();
         scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()), new MockJobLock());
         if (!scheduler.hasStarted()) {
             throw new RuntimeException("scheduler has not been started");

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 1878e0a..798206c 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -1,10 +1,15 @@
 ## Config for Kylin Engine ##
+kylin.cluster.enabled=false
+
+# Required, comma separated list of zk servers; 
+kylin.zookeeper.address=sandbox:2181
+
+# whether run a cluster controller in this node
+kylin.cluster.controller=true
 
 # optional information for the owner of kylin platform, it can be your team's email
 # currently it will be attached to each kylin's htable attribute
 kylin.owner=whoami@kylin.apache.org
-
-kylin.zookeeper.address=sandbox:2181
 # List of web servers in use, this enables one web server instance to sync up with other servers.
 kylin.rest.servers=localhost:7070
 
@@ -12,7 +17,6 @@ kylin.rest.servers=localhost:7070
 kylin.rest.timezone=GMT-8
 
 kylin.server.mode=all
->>>>>>> KYLIN-1188 use helix 0.7.1 to manage the job engine assignment
 # The metadata store in hbase
 kylin.metadata.url=kylin_metadata@hbase
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index cfefef3..5c3883e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -129,7 +129,7 @@ public class BuildCubeWithEngine {
 
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         jobService = ExecutableManager.getInstance(kylinConfig);
-        scheduler = DefaultScheduler.getInstance();
+        scheduler = DefaultScheduler.createInstance();
         scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
         if (!scheduler.hasStarted()) {
             throw new RuntimeException("scheduler has not been started");

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithSpark.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithSpark.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithSpark.java
index 5ab5e83..aa48cea 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithSpark.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithSpark.java
@@ -100,7 +100,7 @@ public class BuildCubeWithSpark {
         for (String jobId : jobService.getAllJobIds()) {
             jobService.deleteJob(jobId);
         }
-        scheduler = DefaultScheduler.getInstance();
+        scheduler = DefaultScheduler.createInstance();
         scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
         if (!scheduler.hasStarted()) {
             throw new RuntimeException("scheduler has not been started");

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithEngine.java
index 4b8ce24..08640d0 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildIIWithEngine.java
@@ -108,7 +108,7 @@ public class BuildIIWithEngine {
 
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         jobService = ExecutableManager.getInstance(kylinConfig);
-        scheduler = DefaultScheduler.getInstance();
+        scheduler = DefaultScheduler.createInstance();
         scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
         if (!scheduler.hasStarted()) {
             throw new RuntimeException("scheduler has not been started");

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 24b0dd9..c6a13d5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -464,14 +464,22 @@
       	    		<groupId>org.apache.httpcomponents</groupId>
 	              <artifactId>httpclient</artifactId>
  	          	  <version>${apache-httpclient.version}</version>
- 	     	    </dependency>
-
+            </dependency>
             <dependency>
                 <groupId>org.roaringbitmap</groupId>
                 <artifactId>RoaringBitmap</artifactId>
                 <version>${roaring.version}</version>
             </dependency>
-
+            <dependency>
+                <groupId>org.apache.helix</groupId>
+                <artifactId>helix-core</artifactId>
+                <version>${helix.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.helix</groupId>
+                <artifactId>helix-examples</artifactId>
+                <version>${helix.version}</version>
+            </dependency>
         </dependencies>
     </dependencyManagement>
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/pom.xml
----------------------------------------------------------------------
diff --git a/server/pom.xml b/server/pom.xml
index 7c1d58a..86ec5a5 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -443,6 +443,38 @@
             </exclusions>
         </dependency>
         <dependency>
+            <groupId>org.apache.helix</groupId>
+            <artifactId>helix-core</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.101tec</groupId>
+                    <artifactId>zkclient</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.github.sgroschupf</groupId>
+                    <artifactId>zkclient</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.101tec</groupId>
+            <artifactId>zkclient</artifactId>
+            <version>0.5</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.zookeeper</groupId>
+            <artifactId>zookeeper</artifactId>
+            <version>${zookeeper.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        
+        <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
             <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index 139cddc..b239867 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -30,7 +30,7 @@ import org.apache.catalina.startup.Tomcat;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.HostnameUtils;
+//import org.apache.kylin.common.util.HostnameUtils;
 import org.apache.kylin.rest.util.ClasspathUtil;
 
 public class DebugTomcat {
@@ -46,8 +46,6 @@ public class DebugTomcat {
 
             System.setProperty("spring.profiles.active", "testing");
 
-            System.setProperty("kylin.rest.address", HostnameUtils.getHostname() + ":" + "7070");
-
             //avoid log permission issue
             if (System.getProperty("catalina.home") == null)
                 System.setProperty("catalina.home", ".");

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 741b5ee..77d987f 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -18,23 +18,17 @@
 
 package org.apache.kylin.rest.controller;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TimeZone;
-
-import com.google.common.base.Preconditions;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.constant.JobTimeFilterEnum;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.helix.HelixClusterAdmin;
 import org.apache.kylin.rest.request.JobListRequest;
 import org.apache.kylin.rest.service.JobService;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.InitializingBean;
@@ -74,16 +68,34 @@ public class JobController extends BasicController implements InitializingBean {
 
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
 
-        Preconditions.checkNotNull(kylinConfig.getZookeeperAddress(), "'kylin.zookeeper.address' couldn't be null, set it in kylin.properties.");
-        final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
-        clusterAdmin.start();
-        
-        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
-            @Override
-            public void run() {
-                clusterAdmin.stop();
-            }
-        }));
+        if (kylinConfig.isClusterEnabled() == true) {
+            logger.info("Kylin cluster enabled, will use Helix/zookeeper to coordinate.");
+            final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+            clusterAdmin.start();
+
+            Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    clusterAdmin.stop();
+                }
+            }));
+        } else {
+            new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    try {
+                        DefaultScheduler scheduler = DefaultScheduler.createInstance();
+                        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+                        if (!scheduler.hasStarted()) {
+                            logger.error("scheduler has not been started");
+                            System.exit(1);
+                        }
+                    } catch (Exception e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            }).start();
+        }
 
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index f62204d..9850e24 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -18,10 +18,11 @@
 package org.apache.kylin.rest.helix;
 
 import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import joptsimple.internal.Strings;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.helix.*;
 import org.apache.helix.api.id.StateModelDefId;
 import org.apache.helix.controller.HelixControllerMain;
@@ -30,7 +31,10 @@ import org.apache.helix.model.*;
 import org.apache.helix.tools.StateModelConfigGenerator;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.Broadcaster;
+import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.storage.hbase.HBaseConnection;
+import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,7 +69,14 @@ public class HelixClusterAdmin {
 
     private HelixClusterAdmin(KylinConfig kylinConfig) {
         this.kylinConfig = kylinConfig;
-        this.zkAddress = kylinConfig.getZookeeperAddress();
+
+        if (kylinConfig.getZookeeperAddress() != null) {
+            this.zkAddress = kylinConfig.getZookeeperAddress();
+        } else {
+            zkAddress = HBaseConnection.getZKConnectString();
+            logger.info("no 'kylin.zookeeper.address' in kylin.properties, use HBase zookeeper " + zkAddress);
+        }
+        
         this.clusterName = kylinConfig.getClusterName();
         this.admin = new ZKHelixAdmin(zkAddress);
     }
@@ -84,7 +95,7 @@ public class HelixClusterAdmin {
         } else if (Constant.SERVER_MODE_STREAM.equalsIgnoreCase(kylinConfig.getServerMode())) {
             instanceTags.add(HelixClusterAdmin.TAG_STREAM_BUILDER);
         }
-        
+
         addInstance(instanceName, instanceTags);
         startInstance(instanceName);
 
@@ -114,7 +125,7 @@ public class HelixClusterAdmin {
         }
 
     }
-    
+
     public void addStreamingJob(String streamingName, long start, long end) {
         String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
         if (!admin.getResourcesInCluster(clusterName).contains(resourceName)) {
@@ -124,9 +135,9 @@ public class HelixClusterAdmin {
         }
 
         admin.rebalance(clusterName, resourceName, 2, "", TAG_STREAM_BUILDER);
-        
+
     }
-    
+
     public void dropStreamingJob(String streamingName, long start, long end) {
         String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
         admin.dropResource(clusterName, resourceName);
@@ -258,7 +269,7 @@ public class HelixClusterAdmin {
                 int indexOfUnderscore = instanceName.lastIndexOf("_");
                 instanceRestAddresses.add(instanceName.substring(0, indexOfUnderscore) + ":" + instanceName.substring(indexOfUnderscore + 1));
             }
-            String restServersInCluster = Strings.join(instanceRestAddresses, ",");
+            String restServersInCluster = StringUtil.join(instanceRestAddresses, ",");
             kylinConfig.setProperty("kylin.rest.servers", restServersInCluster);
             System.setProperty("kylin.rest.servers", restServersInCluster);
             logger.info("kylin.rest.servers update to " + restServersInCluster);

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 8ca4669..e7411a9 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -590,8 +590,11 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         logger.debug("on updateOnNewSegmentReady: " + cubeName);
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        HelixClusterAdmin jobEngineAdmin = HelixClusterAdmin.getInstance(kylinConfig);
-        boolean isLeaderRole = jobEngineAdmin.isLeaderRole(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE);
+        boolean isLeaderRole = true;
+        if (kylinConfig.isClusterEnabled()) {
+            HelixClusterAdmin jobEngineAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+            isLeaderRole = jobEngineAdmin.isLeaderRole(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE);
+        }
         logger.debug("server is leader role ? " + isLeaderRole);
         if (isLeaderRole == true) {
             keepCubeRetention(cubeName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/test/java/org/apache/kylin/rest/controller/JobControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/JobControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/JobControllerTest.java
index 697f11f..c95d738 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/JobControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/JobControllerTest.java
@@ -1,122 +1,123 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.rest.controller;
-
-import static org.junit.Assert.assertNotNull;
-
-import java.io.IOException;
-import java.util.Date;
-
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.cube.CubeDescManager;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.model.CubeDesc;
-import org.apache.kylin.job.JobInstance;
-import org.apache.kylin.job.dao.ExecutableDao;
-import org.apache.kylin.job.exception.PersistentException;
-import org.apache.kylin.rest.request.JobBuildRequest;
-import org.apache.kylin.rest.request.JobListRequest;
-import org.apache.kylin.rest.service.CubeService;
-import org.apache.kylin.rest.service.JobService;
-import org.apache.kylin.rest.service.ServiceTestBase;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-
-/**
- * @author xduo
- */
-public class JobControllerTest extends ServiceTestBase {
-
-    private JobController jobSchedulerController;
-    private CubeController cubeController;
-    @Autowired
-    JobService jobService;
-
-    @Autowired
-    CubeService cubeService;
-    private static final String CUBE_NAME = "new_job_controller";
-
-    private CubeManager cubeManager;
-    private CubeDescManager cubeDescManager;
-    private ExecutableDao executableDAO;
-
-    @Before
-    public void setup() throws Exception {
-        super.setup();
-
-        jobSchedulerController = new JobController();
-        jobSchedulerController.setJobService(jobService);
-        cubeController = new CubeController();
-        cubeController.setJobService(jobService);
-        cubeController.setCubeService(cubeService);
-
-        KylinConfig testConfig = getTestConfig();
-        cubeManager = CubeManager.getInstance(testConfig);
-        cubeDescManager = CubeDescManager.getInstance(testConfig);
-        executableDAO = ExecutableDao.getInstance(testConfig);
-
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        if (cubeManager.getCube(CUBE_NAME) != null) {
-            cubeManager.dropCube(CUBE_NAME, false);
-        }
-    }
-
-    @Test
-    public void testBasics() throws IOException, PersistentException {
-        CubeDesc cubeDesc = cubeDescManager.getCubeDesc("test_kylin_cube_with_slr_left_join_desc");
-        CubeInstance cube = cubeManager.createCube(CUBE_NAME, "DEFAULT", cubeDesc, "test");
-        assertNotNull(cube);
-
-        JobListRequest jobRequest = new JobListRequest();
-        jobRequest.setTimeFilter(4);
-        Assert.assertNotNull(jobSchedulerController.list(jobRequest));
-
-        JobBuildRequest jobBuildRequest = new JobBuildRequest();
-        jobBuildRequest.setBuildType("BUILD");
-        jobBuildRequest.setStartTime(0L);
-        jobBuildRequest.setEndTime(new Date().getTime());
-        JobInstance job = cubeController.rebuild(CUBE_NAME, jobBuildRequest);
-
-        Assert.assertNotNull(jobSchedulerController.get(job.getId()));
-        executableDAO.deleteJob(job.getId());
-        if (cubeManager.getCube(CUBE_NAME) != null) {
-            cubeManager.dropCube(CUBE_NAME, false);
-        }
-
-        // jobSchedulerController.cancel(job.getId());
-    }
-
-    @Test(expected = RuntimeException.class)
-    public void testResume() throws IOException {
-        JobBuildRequest jobBuildRequest = new JobBuildRequest();
-        jobBuildRequest.setBuildType("BUILD");
-        jobBuildRequest.setStartTime(20130331080000L);
-        jobBuildRequest.setEndTime(20131212080000L);
-        JobInstance job = cubeController.rebuild(CUBE_NAME, jobBuildRequest);
-
-        jobSchedulerController.resume(job.getId());
-    }
-}
+///*
+// * Licensed to the Apache Software Foundation (ASF) under one
+// * or more contributor license agreements.  See the NOTICE file
+// * distributed with this work for additional information
+// * regarding copyright ownership.  The ASF licenses this file
+// * to you under the Apache License, Version 2.0 (the
+// * "License"); you may not use this file except in compliance
+// * with the License.  You may obtain a copy of the License at
+// * 
+// *     http://www.apache.org/licenses/LICENSE-2.0
+// * 
+// * Unless required by applicable law or agreed to in writing, software
+// * distributed under the License is distributed on an "AS IS" BASIS,
+// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// * See the License for the specific language governing permissions and
+// * limitations under the License.
+//*/
+//
+//package org.apache.kylin.rest.controller;
+//
+//import static org.junit.Assert.assertNotNull;
+//
+//import java.io.IOException;
+//import java.util.Date;
+//
+//import org.apache.kylin.common.KylinConfig;
+//import org.apache.kylin.cube.CubeDescManager;
+//import org.apache.kylin.cube.CubeInstance;
+//import org.apache.kylin.cube.CubeManager;
+//import org.apache.kylin.cube.model.CubeDesc;
+//import org.apache.kylin.job.JobInstance;
+//import org.apache.kylin.job.dao.ExecutableDao;
+//import org.apache.kylin.job.exception.PersistentException;
+//import org.apache.kylin.rest.request.JobBuildRequest;
+//import org.apache.kylin.rest.request.JobListRequest;
+//import org.apache.kylin.rest.service.CubeService;
+//import org.apache.kylin.rest.service.JobService;
+//import org.apache.kylin.rest.service.ServiceTestBase;
+//import org.junit.After;
+//import org.junit.Assert;
+//import org.junit.Before;
+//import org.junit.Test;
+//import org.springframework.beans.factory.annotation.Autowired;
+//
+///**
+// * @author xduo
+// */
+//public class JobControllerTest extends ServiceTestBase {
+//
+//    private JobController jobSchedulerController;
+//    private CubeController cubeController;
+//    @Autowired
+//    JobService jobService;
+//
+//    @Autowired
+//    CubeService cubeService;
+//    private static final String CUBE_NAME = "new_job_controller";
+//
+//    private CubeManager cubeManager;
+//    private CubeDescManager cubeDescManager;
+//    private ExecutableDao executableDAO;
+//
+//    @Before
+//    public void setup() throws Exception {
+//        super.setup();
+//
+//        KylinConfig testConfig = getTestConfig();
+//        testConfig.setZookeeperAddress("sandbox:2181");
+//        jobSchedulerController = new JobController();
+//        jobSchedulerController.setJobService(jobService);
+//        cubeController = new CubeController();
+//        cubeController.setJobService(jobService);
+//        cubeController.setCubeService(cubeService);
+//
+//        cubeManager = CubeManager.getInstance(testConfig);
+//        cubeDescManager = CubeDescManager.getInstance(testConfig);
+//        executableDAO = ExecutableDao.getInstance(testConfig);
+//
+//    }
+//
+//    @After
+//    public void tearDown() throws Exception {
+//        if (cubeManager.getCube(CUBE_NAME) != null) {
+//            cubeManager.dropCube(CUBE_NAME, false);
+//        }
+//    }
+//
+//    @Test
+//    public void testBasics() throws IOException, PersistentException {
+//        CubeDesc cubeDesc = cubeDescManager.getCubeDesc("test_kylin_cube_with_slr_left_join_desc");
+//        CubeInstance cube = cubeManager.createCube(CUBE_NAME, "DEFAULT", cubeDesc, "test");
+//        assertNotNull(cube);
+//
+//        JobListRequest jobRequest = new JobListRequest();
+//        jobRequest.setTimeFilter(4);
+//        Assert.assertNotNull(jobSchedulerController.list(jobRequest));
+//
+//        JobBuildRequest jobBuildRequest = new JobBuildRequest();
+//        jobBuildRequest.setBuildType("BUILD");
+//        jobBuildRequest.setStartTime(0L);
+//        jobBuildRequest.setEndTime(new Date().getTime());
+//        JobInstance job = cubeController.rebuild(CUBE_NAME, jobBuildRequest);
+//
+//        Assert.assertNotNull(jobSchedulerController.get(job.getId()));
+//        executableDAO.deleteJob(job.getId());
+//        if (cubeManager.getCube(CUBE_NAME) != null) {
+//            cubeManager.dropCube(CUBE_NAME, false);
+//        }
+//
+//        // jobSchedulerController.cancel(job.getId());
+//    }
+//
+//    @Test(expected = RuntimeException.class)
+//    public void testResume() throws IOException {
+//        JobBuildRequest jobBuildRequest = new JobBuildRequest();
+//        jobBuildRequest.setBuildType("BUILD");
+//        jobBuildRequest.setStartTime(20130331080000L);
+//        jobBuildRequest.setEndTime(20131212080000L);
+//        JobInstance job = cubeController.rebuild(CUBE_NAME, jobBuildRequest);
+//
+//        jobSchedulerController.resume(job.getId());
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
index 70525b3..594e76b5 100644
--- a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
@@ -54,10 +54,10 @@ public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
     public void setup() throws Exception {
         createTestMetadata();
         // start zookeeper on localhost
-        final File tmpDir = new File("/tmp/helix-quickstart");
+        final File tmpDir = File.createTempFile("HelixClusterAdminTest", null); 
         FileUtil.fullyDelete(tmpDir);
         tmpDir.mkdirs();
-        server = new ZkServer("/tmp/helix-quickstart/dataDir", "/tmp/helix-quickstart/logDir", new IDefaultNameSpace() {
+        server = new ZkServer(tmpDir.getAbsolutePath() + "/dataDir", tmpDir.getAbsolutePath() + "/logDir", new IDefaultNameSpace() {
             @Override
             public void createDefaultNameSpace(ZkClient zkClient) {
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
index 4449d2b..763bebe 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
@@ -76,13 +76,10 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
     @BeforeClass
     public static void beforeClass() throws Exception {
         staticCreateTestMetadata();
-        startZookeeper();
         configA = KylinConfig.getInstanceFromEnv();
         configA.setProperty("kylin.rest.servers", "localhost:7070");
-        configA.setProperty("kylin.zookeeper.address", ZK_ADDRESS);
         configB = KylinConfig.getKylinConfigFromInputStream(KylinConfig.getKylinPropertiesAsInputSteam());
         configB.setProperty("kylin.rest.servers", "localhost:7070");
-        configB.setProperty("kylin.zookeeper.address", ZK_ADDRESS);
         configB.setMetadataUrl("../examples/test_metadata");
 
         server = new Server(7070);
@@ -366,19 +363,4 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         return false;
     }
 
-
-    public static void startZookeeper() {
-        logger.info("STARTING Zookeeper at " + ZK_ADDRESS);
-        IDefaultNameSpace defaultNameSpace = new IDefaultNameSpace() {
-            @Override
-            public void createDefaultNameSpace(ZkClient zkClient) {
-            }
-        };
-        new File("/tmp/helix-quickstart").mkdirs();
-        // start zookeeper
-        ZkServer server =
-                new ZkServer("/tmp/helix-quickstart/dataDir", "/tmp/helix-quickstart/logDir",
-                        defaultNameSpace, 2199);
-        server.start();
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 661e8e4..0279d2d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -19,9 +19,12 @@
 package org.apache.kylin.storage.hbase;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +43,8 @@ import org.apache.kylin.engine.mr.HadoopUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.Nullable;
+
 /**
  * @author yangli9
  * 
@@ -227,4 +232,16 @@ public class HBaseConnection {
         }
     }
 
+    public static final String getZKConnectString() {
+        Configuration conf = getCurrentHBaseConfiguration();
+        final String serverList = conf.get(HConstants.ZOOKEEPER_QUORUM);
+        final String port = conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
+        return org.apache.commons.lang3.StringUtils.join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
+            @Nullable
+            @Override
+            public String apply(String input) {
+                return input + ":" + port;
+            }
+        }), ",");
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/c38efff5/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
index d211206..30f2df7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ZookeeperJobLock.java
@@ -1,10 +1,5 @@
 package org.apache.kylin.storage.hbase.util;
 
-import java.util.Arrays;
-import java.util.concurrent.TimeUnit;
-
-import javax.annotation.Nullable;
-
 import org.apache.commons.lang.StringUtils;
 import org.apache.curator.RetryPolicy;
 import org.apache.curator.framework.CuratorFramework;
@@ -12,16 +7,13 @@ import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.imps.CuratorFrameworkState;
 import org.apache.curator.framework.recipes.locks.InterProcessMutex;
 import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.job.lock.JobLock;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
+import java.util.concurrent.TimeUnit;
 
 /**
  */
@@ -37,7 +29,7 @@ public class ZookeeperJobLock implements JobLock {
     @Override
     public boolean lock() {
         this.scheduleID = schedulerId();
-        String zkConnectString = getZKConnectString();
+        String zkConnectString = HBaseConnection.getZKConnectString();
         logger.info("zk connection string:" + zkConnectString);
         logger.info("schedulerId:" + scheduleID);
         if (StringUtils.isEmpty(zkConnectString)) {
@@ -67,19 +59,6 @@ public class ZookeeperJobLock implements JobLock {
         releaseLock();
     }
 
-    private String getZKConnectString() {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        final String serverList = conf.get(HConstants.ZOOKEEPER_QUORUM);
-        final String port = conf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
-        return org.apache.commons.lang3.StringUtils.join(Iterables.transform(Arrays.asList(serverList.split(",")), new Function<String, String>() {
-            @Nullable
-            @Override
-            public String apply(String input) {
-                return input + ":" + port;
-            }
-        }), ",");
-    }
-
     private void releaseLock() {
         try {
             if (zkClient.getState().equals(CuratorFrameworkState.STARTED)) {


[30/43] kylin git commit: KYLIN-1311 Stream cubing auto assignment and load balance

Posted by sh...@apache.org.
KYLIN-1311 Stream cubing auto assignment and load balance


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ab60480f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ab60480f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ab60480f

Branch: refs/heads/helix-rebase
Commit: ab60480f244a106a4e3179590956a04246e9f4db
Parents: 55558551
Author: shaofengshi <sh...@apache.org>
Authored: Thu Jan 14 14:59:54 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../kylin/engine/streaming/BootstrapConfig.java |  8 --
 .../engine/streaming/cli/StreamingCLI.java      |  3 -
 .../kylin/rest/controller/CubeController.java   |  5 ++
 .../rest/controller/StreamingController.java    | 50 +++++++++++++
 .../kylin/rest/helix/HelixClusterAdmin.java     | 13 +++-
 .../helix/LeaderStandbyStateModelFactory.java   | 43 +++++++----
 .../rest/request/StreamingBuildRequest.java     | 77 ++++++++++++++++++++
 .../kylin/rest/request/StreamingRequest.java    |  4 +-
 .../kylin/rest/service/StreamingService.java    | 27 +++++++
 9 files changed, 201 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
index a3e2db5..2b83b84 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
@@ -36,14 +36,6 @@ public class BootstrapConfig {
         this.streaming = streaming;
     }
 
-    public int getPartitionId() {
-        return partitionId;
-    }
-
-    public void setPartitionId(int partitionId) {
-        this.partitionId = partitionId;
-    }
-
     public boolean isFillGap() {
         return fillGap;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
index a73a6ac..96ad1ad 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
@@ -72,9 +72,6 @@ public class StreamingCLI {
                 case "-streaming":
                     bootstrapConfig.setStreaming(args[++i]);
                     break;
-                case "-partition":
-                    bootstrapConfig.setPartitionId(Integer.parseInt(args[++i]));
-                    break;
                 case "-fillGap":
                     bootstrapConfig.setFillGap(Boolean.parseBoolean(args[++i]));
                     break;

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 9afa750..4ab640f 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -27,14 +27,19 @@ import java.util.Map;
 import java.util.UUID;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.CubeUpdate;
 import org.apache.kylin.cube.model.CubeBuildTypeEnum;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
+import org.apache.kylin.engine.streaming.BootstrapConfig;
 import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.JoinedFlatTable;
 import org.apache.kylin.job.exception.JobException;

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index e22bd30..57831d5 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -21,14 +21,23 @@ package org.apache.kylin.rest.controller;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.google.common.base.Preconditions;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.model.CubeBuildTypeEnum;
+import org.apache.kylin.engine.streaming.BootstrapConfig;
 import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.job.JobInstance;
+import org.apache.kylin.job.exception.JobException;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.exception.NotFoundException;
+import org.apache.kylin.rest.request.StreamingBuildRequest;
 import org.apache.kylin.rest.request.StreamingRequest;
+import org.apache.kylin.rest.service.CubeService;
 import org.apache.kylin.rest.service.KafkaConfigService;
 import org.apache.kylin.rest.service.StreamingService;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
@@ -36,6 +45,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.access.AccessDeniedException;
+import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.*;
 
@@ -58,6 +68,9 @@ public class StreamingController extends BasicController {
     @Autowired
     private KafkaConfigService kafkaConfigService;
 
+    @Autowired
+    private CubeService cubeService;
+
     @RequestMapping(value = "/getConfig", method = { RequestMethod.GET })
     @ResponseBody
     public List<StreamingConfig> getStreamings(@RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
@@ -214,6 +227,43 @@ public class StreamingController extends BasicController {
         request.setMessage(message);
     }
 
+
+
+    /**
+     * Send a stream build request
+     *
+     * @param cubeName Cube ID
+     * @return
+     * @throws IOException
+     */
+    @RequestMapping(value = "/{streamingName}/build", method = {RequestMethod.PUT})
+    @ResponseBody
+    public StreamingBuildRequest buildStream(@PathVariable String streamingName, @RequestBody StreamingBuildRequest streamingBuildRequest) {
+        streamingBuildRequest.setStreaming(streamingName);
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getConfig(streamingName);
+        Preconditions.checkNotNull(streamingConfig, "Stream config '" + streamingName + "' is not found.");
+        String cubeName = streamingConfig.getCubeName();
+        List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
+        Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
+        CubeInstance cube = cubes.get(0);
+        if (streamingBuildRequest.isFillGap() == false) {
+            Preconditions.checkArgument(streamingBuildRequest.getEnd() > streamingBuildRequest.getStart(), "End time should be greater than start time.");
+            for (CubeSegment segment : cube.getSegments()) {
+                if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
+                    streamingBuildRequest.setMessage("The segment already exists: " + segment.toString());
+                    streamingBuildRequest.setSuccessful(false);
+                    return streamingBuildRequest;
+                }
+            }
+        }
+
+        streamingService.buildStream(streamingName, streamingBuildRequest);
+        streamingBuildRequest.setMessage("Build request is submitted successfully.");
+        streamingBuildRequest.setSuccessful(true);
+        return streamingBuildRequest;
+
+    }
+
     public void setStreamingService(StreamingService streamingService) {
         this.streamingService= streamingService;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 6300383..f62204d 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -45,7 +45,7 @@ import java.util.concurrent.ConcurrentMap;
 public class HelixClusterAdmin {
 
     public static final String RESOURCE_NAME_JOB_ENGINE = "Resource_JobEngine";
-    public static final String RESOURCE_STREAME_CUBE_PREFIX = "Resource_Streame_";
+    public static final String RESOURCE_STREAME_CUBE_PREFIX = "Resource_Stream_";
 
     public static final String MODEL_LEADER_STANDBY = "LeaderStandby";
     public static final String MODEL_ONLINE_OFFLINE = "OnlineOffline";
@@ -115,15 +115,22 @@ public class HelixClusterAdmin {
 
     }
     
-    public void addStreamCubeSlice(String cubeName, long start, long end) {
-        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + cubeName + "_" + start + "_" + end;
+    public void addStreamingJob(String streamingName, long start, long end) {
+        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
         if (!admin.getResourcesInCluster(clusterName).contains(resourceName)) {
             admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
+        } else {
+            logger.warn("Resource '" + resourceName + "' already exists in cluster, skip adding.");
         }
 
         admin.rebalance(clusterName, resourceName, 2, "", TAG_STREAM_BUILDER);
         
     }
+    
+    public void dropStreamingJob(String streamingName, long start, long end) {
+        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + streamingName + "_" + start + "_" + end;
+        admin.dropResource(clusterName, resourceName);
+    }
 
     /**
      * Start the instance and register the state model factory

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
index c2a78e7..df23ea0 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
@@ -9,21 +9,24 @@ import org.apache.helix.api.id.ResourceId;
 import org.apache.helix.model.Message;
 import org.apache.helix.participant.statemachine.Transition;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
-import org.apache.kylin.engine.streaming.cli.StreamingCLI;
+import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.lock.MockJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
 import static org.apache.kylin.rest.helix.HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX;
 
 /**
  */
 public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactory<TransitionHandler> {
     private static final Logger logger = LoggerFactory.getLogger(LeaderStandbyStateModelFactory.class);
-    
+
     @Override
     public TransitionHandler createStateTransitionHandler(PartitionId partitionId) {
         if (partitionId.getResourceId().equals(ResourceId.from(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE))) {
@@ -38,7 +41,7 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
     }
 
     public static class JobEngineStateModel extends TransitionHandler {
-        
+
         public static JobEngineStateModel INSTANCE = new JobEngineStateModel();
 
         @Transition(to = "LEADER", from = "STANDBY")
@@ -62,7 +65,7 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
         public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
             logger.info("JobEngineStateModel.onBecomeStandbyFromLeader()");
             DefaultScheduler.destroyInstance();
-            
+
         }
 
         @Transition(to = "STANDBY", from = "OFFLINE")
@@ -71,7 +74,6 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
 
         }
 
-
         @Transition(to = "OFFLINE", from = "STANDBY")
         public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
             logger.info("JobEngineStateModel.onBecomeOfflineFromStandby()");
@@ -80,7 +82,7 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
     }
 
     public static class StreamCubeStateModel extends TransitionHandler {
-        
+
         public static StreamCubeStateModel INSTANCE = new StreamCubeStateModel();
 
         @Transition(to = "LEADER", from = "STANDBY")
@@ -90,27 +92,40 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
             long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_")) + 1);
             String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
             long start = Long.parseLong(temp.substring(temp.lastIndexOf("_")) + 1);
-            String cubeName = temp.substring(0, temp.lastIndexOf("_"));
+            String streamingConfig = temp.substring(0, temp.lastIndexOf("_"));
+
+            KylinConfigBase.getKylinHome();
+            String segmentId = start + "_" + end;
+            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingConfig + " " + segmentId + " -oneoff true -start " + start + " -end " + end + " -streaming " + streamingConfig;
+            logger.info("Executing: " + cmd);
+            try {
+                String line;
+                Process p = Runtime.getRuntime().exec(cmd);
+                BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
+                while ((line = input.readLine()) != null) {
+                    logger.info(line);
+                }
+                input.close();
+            } catch (IOException err) {
+                logger.error("Error happens during build streaming  '" + resourceName + "'", err);
+                throw new RuntimeException(err);
+            }
 
-            final Runnable runnable = new OneOffStreamingBuilder(cubeName, start, end).build();
-            runnable.run();
         }
 
         @Transition(to = "STANDBY", from = "LEADER")
         public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
-           
 
         }
 
         @Transition(to = "STANDBY", from = "OFFLINE")
         public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
-           
-        }
 
+        }
 
         @Transition(to = "OFFLINE", from = "STANDBY")
         public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
-           
+
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
new file mode 100644
index 0000000..e06a06c
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.request;
+
+public class StreamingBuildRequest {
+
+    private String streaming;
+    private long start;
+    private long end;
+    private boolean fillGap;
+    private String message;
+    private boolean successful;
+
+    public String getStreaming() {
+        return streaming;
+    }
+
+    public void setStreaming(String streaming) {
+        this.streaming = streaming;
+    }
+
+    public boolean isSuccessful() {
+        return successful;
+    }
+
+    public void setSuccessful(boolean successful) {
+        this.successful = successful;
+    }
+
+    public String getMessage() {
+        return message;
+    }
+
+    public void setMessage(String message) {
+        this.message = message;
+    }
+
+    public long getStart() {
+        return start;
+    }
+
+    public void setStart(long start) {
+        this.start = start;
+    }
+
+    public long getEnd() {
+        return end;
+    }
+
+    public void setEnd(long end) {
+        this.end = end;
+    }
+
+    public boolean isFillGap() {
+        return fillGap;
+    }
+
+    public void setFillGap(boolean fillGap) {
+        this.fillGap = fillGap;
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/request/StreamingRequest.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/request/StreamingRequest.java b/server/src/main/java/org/apache/kylin/rest/request/StreamingRequest.java
index 07c30f3..b737c3e 100644
--- a/server/src/main/java/org/apache/kylin/rest/request/StreamingRequest.java
+++ b/server/src/main/java/org/apache/kylin/rest/request/StreamingRequest.java
@@ -19,7 +19,9 @@
 
 package org.apache.kylin.rest.request;
 
-import java.lang.String;public class StreamingRequest {
+import java.lang.String;
+
+public class StreamingRequest {
 
     private String project;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab60480f/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index e40426b..da20949 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -18,12 +18,22 @@
 
 package org.apache.kylin.rest.service;
 
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.engine.streaming.BootstrapConfig;
 import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
+import org.apache.kylin.rest.request.StreamingBuildRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.access.prepost.PostFilter;
+import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
 import java.io.IOException;
@@ -33,6 +43,7 @@ import java.util.List;
 @Component("streamingMgmtService")
 public class StreamingService extends BasicService {
 
+    private static final Logger logger = LoggerFactory.getLogger(StreamingService.class);
     @Autowired
     private AccessService accessService;
 
@@ -87,4 +98,20 @@ public class StreamingService extends BasicService {
         getStreamingManager().removeStreamingConfig(config);
     }
 
+
+    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
+    public void buildStream(String cube, StreamingBuildRequest streamingBuildRequest) {
+        HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
+        if (streamingBuildRequest.isFillGap()) {
+            final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streamingBuildRequest.getStreaming());
+            final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName());
+            logger.info("all gaps:" + org.apache.commons.lang3.StringUtils.join(gaps, ","));
+            for (Pair<Long, Long> gap : gaps) {
+                clusterAdmin.addStreamingJob(streamingBuildRequest.getStreaming(), gap.getFirst(), gap.getSecond());
+            }
+        } else {
+            clusterAdmin.addStreamingJob(streamingBuildRequest.getStreaming(), streamingBuildRequest.getStart(), streamingBuildRequest.getEnd());
+        }
+    }
+
 }


[25/43] kylin git commit: KYLIN-1189 resume running jobs when job engine failover

Posted by sh...@apache.org.
KYLIN-1189 resume running jobs when job engine failover


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0d7f9e66
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0d7f9e66
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0d7f9e66

Branch: refs/heads/helix-rebase
Commit: 0d7f9e666903a3f226eb877240e87e4942d8a9fd
Parents: 3b5260a
Author: shaofengshi <sh...@apache.org>
Authored: Thu Dec 3 10:32:40 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../job/impl/threadpool/DefaultScheduler.java   |  7 +-----
 .../kylin/job/manager/ExecutableManager.java    | 16 +++++++++++++
 .../kylin/rest/service/CacheServiceTest.java    | 25 ++++++++++++++++++++
 3 files changed, 42 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0d7f9e66/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
index 417e279..2915c60 100644
--- a/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
+++ b/core-job/src/main/java/org/apache/kylin/job/impl/threadpool/DefaultScheduler.java
@@ -170,12 +170,7 @@ public class DefaultScheduler implements Scheduler<AbstractExecutable>, Connecti
         jobPool = new ThreadPoolExecutor(corePoolSize, corePoolSize, Long.MAX_VALUE, TimeUnit.DAYS, new SynchronousQueue<Runnable>());
         context = new DefaultContext(Maps.<String, Executable> newConcurrentMap(), jobEngineConfig.getConfig());
 
-        for (AbstractExecutable executable : executableManager.getAllExecutables()) {
-            if (executable.getStatus() == ExecutableState.READY) {
-                executableManager.updateJobOutput(executable.getId(), ExecutableState.ERROR, null, "scheduler initializing work to reset job to ERROR status");
-            }
-        }
-        executableManager.updateAllRunningJobsToError();
+        executableManager.resumeAllRunningJobs();
 
         Runtime.getRuntime().addShutdownHook(new Thread() {
             public void run() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/0d7f9e66/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java b/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
index 3effbe7..4d03389 100644
--- a/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
+++ b/core-job/src/main/java/org/apache/kylin/job/manager/ExecutableManager.java
@@ -207,6 +207,7 @@ public class ExecutableManager {
         }
     }
 
+    @Deprecated
     public void updateAllRunningJobsToError() {
         try {
             final List<ExecutableOutputPO> jobOutputs = executableDao.getJobOutputs();
@@ -222,6 +223,21 @@ public class ExecutableManager {
         }
     }
 
+    public void resumeAllRunningJobs() {
+        try {
+            final List<ExecutableOutputPO> jobOutputs = executableDao.getJobOutputs();
+            for (ExecutableOutputPO executableOutputPO : jobOutputs) {
+                if (executableOutputPO.getStatus().equalsIgnoreCase(ExecutableState.RUNNING.toString())) {
+                    executableOutputPO.setStatus(ExecutableState.READY.toString());
+                    executableDao.updateJobOutput(executableOutputPO);
+                }
+            }
+        } catch (PersistentException e) {
+            logger.error("error reset job status from RUNNING to READY", e);
+            throw new RuntimeException(e);
+        }
+    }
+
     public void resumeJob(String jobId) {
         AbstractExecutable job = getJob(jobId);
         if (job == null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/0d7f9e66/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
index 25b131a..4449d2b 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
@@ -20,11 +20,15 @@ package org.apache.kylin.rest.service;
 
 import static org.junit.Assert.*;
 
+import java.io.File;
 import java.util.Arrays;
 import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.I0Itec.zkclient.IDefaultNameSpace;
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.ZkServer;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -60,6 +64,8 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
 
     private static Server server;
 
+    private static String ZK_ADDRESS = "localhost:2199";
+    
     private static KylinConfig configA;
     private static KylinConfig configB;
 
@@ -70,10 +76,13 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
     @BeforeClass
     public static void beforeClass() throws Exception {
         staticCreateTestMetadata();
+        startZookeeper();
         configA = KylinConfig.getInstanceFromEnv();
         configA.setProperty("kylin.rest.servers", "localhost:7070");
+        configA.setProperty("kylin.zookeeper.address", ZK_ADDRESS);
         configB = KylinConfig.getKylinConfigFromInputStream(KylinConfig.getKylinPropertiesAsInputSteam());
         configB.setProperty("kylin.rest.servers", "localhost:7070");
+        configB.setProperty("kylin.zookeeper.address", ZK_ADDRESS);
         configB.setMetadataUrl("../examples/test_metadata");
 
         server = new Server(7070);
@@ -356,4 +365,20 @@ public class CacheServiceTest extends LocalFileMetadataTestCase {
         }
         return false;
     }
+
+
+    public static void startZookeeper() {
+        logger.info("STARTING Zookeeper at " + ZK_ADDRESS);
+        IDefaultNameSpace defaultNameSpace = new IDefaultNameSpace() {
+            @Override
+            public void createDefaultNameSpace(ZkClient zkClient) {
+            }
+        };
+        new File("/tmp/helix-quickstart").mkdirs();
+        // start zookeeper
+        ZkServer server =
+                new ZkServer("/tmp/helix-quickstart/dataDir", "/tmp/helix-quickstart/logDir",
+                        defaultNameSpace, 2199);
+        server.start();
+    }
 }


[15/43] kylin git commit: minor, UI remove deprecated code

Posted by sh...@apache.org.
minor, UI remove deprecated code


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/61f3278e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/61f3278e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/61f3278e

Branch: refs/heads/helix-rebase
Commit: 61f3278e5dd98cbfaca8438b9cabd2d4b577ffc6
Parents: a541068
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 17:55:46 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 17:55:46 2016 +0800

----------------------------------------------------------------------
 webapp/app/partials/cubeDesigner/refresh_settings.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/61f3278e/webapp/app/partials/cubeDesigner/refresh_settings.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/refresh_settings.html b/webapp/app/partials/cubeDesigner/refresh_settings.html
index 15dd4af..1ad294e 100755
--- a/webapp/app/partials/cubeDesigner/refresh_settings.html
+++ b/webapp/app/partials/cubeDesigner/refresh_settings.html
@@ -131,7 +131,7 @@
                       Please input start date when partition date column is defined in model.
                     </small>
                     <!--vier model will convert use filter-->
-                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column!=null && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
+                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
                   </div>
                 </div>
               </div>


[20/43] kylin git commit: KYLIN-1311 on the way

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java b/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
new file mode 100644
index 0000000..3182c16
--- /dev/null
+++ b/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.service;
+
+import org.I0Itec.zkclient.IDefaultNameSpace;
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.ZkServer;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.springframework.security.authentication.TestingAuthenticationToken;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import java.io.File;
+
+/**
+ */
+public class TestBaseWithZookeeper extends LocalFileMetadataTestCase {
+    protected static final String zkAddress = "localhost:2199";
+    static ZkServer server;
+    static boolean zkStarted = false;
+
+    @BeforeClass
+    public static void setupResource() throws Exception {
+        staticCreateTestMetadata();
+
+        if (zkStarted == false) {
+            final File tmpDir = File.createTempFile("KylinTest", null);
+            FileUtil.fullyDelete(tmpDir);
+            tmpDir.mkdirs();
+            tmpDir.deleteOnExit();
+            server = new ZkServer(tmpDir.getAbsolutePath() + "/dataDir", tmpDir.getAbsolutePath() + "/logDir", new IDefaultNameSpace() {
+                @Override
+                public void createDefaultNameSpace(ZkClient zkClient) {
+                }
+            }, 2199, 1000, 2000);
+
+            server.start();
+            zkStarted = true;
+            System.setProperty("kylin.zookeeper.address", zkAddress);
+        }
+
+    }
+
+    @AfterClass
+    public static void tearDownResource() {
+        if (server == null) {
+            server.shutdown();
+            zkStarted = false;
+            System.setProperty("kylin.zookeeper.address", "");
+        }
+
+        staticCleanupTestMetadata();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/1c4deab9/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index 0907623..b075387 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -40,7 +40,10 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.ConcurrentSkipListMap;
 
+import com.google.common.collect.Maps;
 import kafka.message.MessageAndOffset;
 
 import org.apache.commons.lang3.StringUtils;
@@ -102,7 +105,9 @@ public final class TimedJsonStreamParser extends StreamingParser {
     @Override
     public StreamingMessage parse(MessageAndOffset messageAndOffset) {
         try {
-            Map<String, String> root = mapper.readValue(new ByteBufferBackedInputStream(messageAndOffset.message().payload()), mapType);
+            Map<String, String> message = mapper.readValue(new ByteBufferBackedInputStream(messageAndOffset.message().payload()), mapType);
+            ConcurrentMap<String, String> root = new ConcurrentSkipListMap<String, String>(String.CASE_INSENSITIVE_ORDER);
+            root.putAll(message);
             String tsStr = root.get(tsColName);
             //Preconditions.checkArgument(!StringUtils.isEmpty(tsStr), "Timestamp field " + tsColName + //
             //" cannot be null, the message offset is " + messageAndOffset.getOffset() + " content is " + new String(messageAndOffset.getRawData()));


[27/43] kylin git commit: KYLIN-1188 use helix 0.7.1 to manage the job engine assignment

Posted by sh...@apache.org.
KYLIN-1188 use helix 0.7.1 to manage the job engine assignment


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/96e9577b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/96e9577b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/96e9577b

Branch: refs/heads/helix-rebase
Commit: 96e9577b3dd491b456ceff9eb52e66dd621c890d
Parents: 436db28
Author: shaofengshi <sh...@apache.org>
Authored: Tue Jan 12 15:07:25 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |  16 +-
 .../apache/kylin/common/KylinConfigBase.java    |  28 +++
 .../test_case_data/sandbox/kylin.properties     |   2 +
 pom.xml                                         |   1 +
 .../kylin/rest/controller/JobController.java    |  33 +--
 .../kylin/rest/helix/HelixClusterAdmin.java     | 245 +++++++++++++++++++
 .../helix/LeaderStandbyStateModelFactory.java   |  70 ++++++
 .../apache/kylin/rest/service/CubeService.java  |   6 +-
 .../kylin/rest/helix/HelixClusterAdminTest.java | 140 +++++++++++
 9 files changed, 516 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 78a564d..bbfa7c8 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -1,12 +1,24 @@
-## Config for Kylin Engine ##
+## Cluster related properties ##
+# Required, comma separated list of zk servers; 
+kylin.zookeeper.address=
 
+# rest address of this instance, ;
+# optional, default be <hostname>:7070
+kylin.rest.address=
+
+# whether run a cluster controller in this node
+kylin.cluster.controller=true
 
 # optional information for the owner of kylin platform, it can be your team's email
 # currently it will be attached to each kylin's htable attribute
 kylin.owner=whoami@kylin.apache.org
 
 # List of web servers in use, this enables one web server instance to sync up with other servers.
-kylin.rest.servers=localhost:7070
+# Deprecated, cluster will self-discover and update this.
+# kylin.rest.servers=localhost:7070
+
+# Server mode: all, job, query
+kylin.server.mode=all
 
 # The metadata store in hbase
 kylin.metadata.url=kylin_metadata@hbase

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 826a28c..6d3ac0d 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -538,6 +538,34 @@ public class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("crossdomain.enable", "true"));
     }
 
+    public String getZookeeperAddress() {
+        return this.getOptional("kylin.zookeeper.address");
+    }
+
+    public void setZookeeperAddress(String zkAddress) {
+        setProperty("kylin.zookeeper.address", zkAddress);
+    }
+    
+    public String getClusterName() {
+        return this.getOptional("kylin.cluster.name", getMetadataUrlPrefix());
+    }
+
+    public void setClusterName(String clusterName) {
+        setProperty("kylin.cluster.name", clusterName);
+    }
+    
+    public boolean isClusterController() {
+        return Boolean.parseBoolean(getOptional("kylin.cluster.controller", "true"));
+    }
+    
+    public String getRestAddress() {
+        return this.getOptional("kylin.rest.address");
+    }
+
+    public void setRestAddress(String restAddress) {
+        setProperty("kylin.rest.address", restAddress);
+    }
+    
     public String toString() {
         return getMetadataUrl();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 0d89b8c..1878e0a 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -11,6 +11,8 @@ kylin.rest.servers=localhost:7070
 #set display timezone on UI,format like[GMT+N or GMT-N]
 kylin.rest.timezone=GMT-8
 
+kylin.server.mode=all
+>>>>>>> KYLIN-1188 use helix 0.7.1 to manage the job engine assignment
 # The metadata store in hbase
 kylin.metadata.url=kylin_metadata@hbase
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2e42841..24b0dd9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -113,6 +113,7 @@
             org/apache/kylin/**/tools/**:**/*CLI.java
         </sonar.jacoco.excludes>
 
+        <helix.version>0.7.1</helix.version>
     </properties>
 
     <licenses>

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 9dfb594..741b5ee 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -26,15 +26,13 @@ import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
-import com.google.common.collect.Lists;
-import joptsimple.internal.Strings;
+import com.google.common.base.Preconditions;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.constant.JobTimeFilterEnum;
 import org.apache.kylin.rest.exception.InternalErrorException;
-import org.apache.kylin.rest.helix.HelixJobEngineAdmin;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
 import org.apache.kylin.rest.request.JobListRequest;
 import org.apache.kylin.rest.service.JobService;
 import org.slf4j.Logger;
@@ -51,8 +49,6 @@ import java.io.IOException;
 import java.util.*;
 
 /**
- * @author ysong1
- * @author Jack
  * 
  */
 @Controller
@@ -76,9 +72,19 @@ public class JobController extends BasicController implements InitializingBean {
         TimeZone tzone = TimeZone.getTimeZone(timeZone);
         TimeZone.setDefault(tzone);
 
-        final String instanceName = HelixJobEngineAdmin.getCurrentInstanceName();
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
 
+        Preconditions.checkNotNull(kylinConfig.getZookeeperAddress(), "'kylin.zookeeper.address' couldn't be null, set it in kylin.properties.");
+        final HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+        clusterAdmin.start();
+        
+        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+            @Override
+            public void run() {
+                clusterAdmin.stop();
+            }
+        }));
+
     }
 
     /**
@@ -190,17 +196,4 @@ public class JobController extends BasicController implements InitializingBean {
         this.jobService = jobService;
     }
 
-    private void updateKylinCluster(List<String> instances) {
-        List<String> instanceRestAddresses = Lists.newArrayList();
-        for (String instanceName : instances) {
-            int indexOfUnderscore = instanceName.lastIndexOf("_");
-            instanceRestAddresses.add(instanceName.substring(0, indexOfUnderscore) + ":" + instanceName.substring(indexOfUnderscore + 1));
-        }
-        String restServersInCluster = Strings.join(instanceRestAddresses, ",");
-        KylinConfig.getInstanceFromEnv().setProperty("kylin.rest.servers", restServersInCluster);
-        System.setProperty("kylin.rest.servers", restServersInCluster);
-        Broadcaster.clearCache();
-
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
new file mode 100644
index 0000000..9983aae
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.rest.helix;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import joptsimple.internal.Strings;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.helix.*;
+import org.apache.helix.api.id.StateModelDefId;
+import org.apache.helix.controller.HelixControllerMain;
+import org.apache.helix.manager.zk.ZKHelixAdmin;
+import org.apache.helix.model.*;
+import org.apache.helix.tools.StateModelConfigGenerator;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.restclient.Broadcaster;
+import org.apache.kylin.rest.constant.Constant;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Administrator of Kylin cluster
+ */
+public class HelixClusterAdmin {
+
+    public static final String RESOURCE_NAME_JOB_ENGINE = "Resource_JobEngine";
+
+    public static final String MODEL_LEADER_STANDBY = "LeaderStandby";
+    public static final String MODEL_ONLINE_OFFLINE = "OnlineOffline";
+    public static final String TAG_JOB_ENGINE = "Tag_JobEngine";
+
+    private static ConcurrentMap<KylinConfig, HelixClusterAdmin> instanceMaps = Maps.newConcurrentMap();
+    private HelixManager participantManager;
+    private HelixManager controllerManager;
+
+    private final KylinConfig kylinConfig;
+
+    private static final Logger logger = LoggerFactory.getLogger(HelixClusterAdmin.class);
+    private final String zkAddress;
+    private final ZKHelixAdmin admin;
+    private final String clusterName;
+
+    private HelixClusterAdmin(KylinConfig kylinConfig) {
+        this.kylinConfig = kylinConfig;
+        this.zkAddress = kylinConfig.getZookeeperAddress();
+        this.clusterName = kylinConfig.getClusterName();
+        this.admin = new ZKHelixAdmin(zkAddress);
+    }
+
+    public void start() throws Exception {
+        initCluster();
+        final String instanceName = getCurrentInstanceName();
+
+        // use the tag to mark node's role.
+        final List<String> instanceTags = Lists.newArrayList();
+        final boolean runJobEngine = Constant.SERVER_MODE_ALL.equalsIgnoreCase(kylinConfig.getServerMode()) || Constant.SERVER_MODE_JOB.equalsIgnoreCase(kylinConfig.getServerMode());
+        if (runJobEngine) {
+            instanceTags.add(HelixClusterAdmin.TAG_JOB_ENGINE);
+        }
+
+        addInstance(instanceName, instanceTags);
+        startInstance(instanceName);
+
+        rebalanceWithTag(instanceTags);
+
+        boolean startController = kylinConfig.isClusterController();
+        if (startController) {
+            startController();
+        }
+    }
+
+    /**
+     * Initiate the cluster, adding state model definitions and resource definitions
+     */
+    protected void initCluster() {
+        admin.addCluster(clusterName, false);
+        if (admin.getStateModelDef(clusterName, MODEL_ONLINE_OFFLINE) == null) {
+            admin.addStateModelDef(clusterName, MODEL_ONLINE_OFFLINE, new StateModelDefinition(StateModelConfigGenerator.generateConfigForOnlineOffline()));
+        }
+        if (admin.getStateModelDef(clusterName, MODEL_LEADER_STANDBY) == null) {
+            admin.addStateModelDef(clusterName, MODEL_LEADER_STANDBY, new StateModelDefinition(StateModelConfigGenerator.generateConfigForLeaderStandby()));
+        }
+
+        // add job engine as a resource, 1 partition
+        if (!admin.getResourcesInCluster(clusterName).contains(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE)) {
+            admin.addResource(clusterName, HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
+        }
+
+    }
+
+    /**
+     * Start the instance and register the state model factory
+     * @param instanceName
+     * @throws Exception
+     */
+    protected void startInstance(String instanceName) throws Exception {
+        participantManager = HelixManagerFactory.getZKHelixManager(clusterName, instanceName, InstanceType.PARTICIPANT, zkAddress);
+        participantManager.getStateMachineEngine().registerStateModelFactory(StateModelDefId.from(MODEL_LEADER_STANDBY), new LeaderStandbyStateModelFactory());
+        participantManager.connect();
+        participantManager.addLiveInstanceChangeListener(new KylinClusterLiveInstanceChangeListener());
+
+    }
+
+    /**
+     * Rebalance the resource with the tags
+     * @param tags
+     */
+    protected void rebalanceWithTag(List<String> tags) {
+        for (String tag : tags) {
+            if (tag.equals(TAG_JOB_ENGINE)) {
+                List<String> instances = admin.getInstancesInClusterWithTag(clusterName, TAG_JOB_ENGINE);
+                admin.rebalance(clusterName, RESOURCE_NAME_JOB_ENGINE, instances.size(), "", tag);
+            }
+        }
+    }
+
+    /**
+     * Start an embedded helix controller
+     */
+    protected void startController() {
+        controllerManager = HelixControllerMain.startHelixController(zkAddress, clusterName, "controller", HelixControllerMain.STANDALONE);
+    }
+
+    public void stop() {
+        if (participantManager != null) {
+            participantManager.disconnect();
+        }
+
+        if (controllerManager != null) {
+            controllerManager.disconnect();
+        }
+    }
+
+    public String getInstanceState(String resourceName) {
+        String instanceName = this.getCurrentInstanceName();
+        final ExternalView resourceExternalView = admin.getResourceExternalView(clusterName, resourceName);
+        if (resourceExternalView == null) {
+            logger.warn("fail to get ExternalView, clusterName:" + clusterName + " resourceName:" + resourceName);
+            return "ERROR";
+        }
+        final Set<String> partitionSet = resourceExternalView.getPartitionSet();
+        final Map<String, String> stateMap = resourceExternalView.getStateMap(partitionSet.iterator().next());
+        if (stateMap.containsKey(instanceName)) {
+            return stateMap.get(instanceName);
+        } else {
+            logger.warn("fail to get state, clusterName:" + clusterName + " resourceName:" + resourceName + " instance:" + instanceName);
+            return "ERROR";
+        }
+    }
+
+    /**
+     * Check whether current kylin instance is in the leader role
+     * @return
+     */
+    public boolean isLeaderRole(String resourceName) {
+        final String instanceState = getInstanceState(resourceName);
+        logger.debug("instance state: " + instanceState);
+        if ("LEADER".equalsIgnoreCase(instanceState)) {
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Add instance to cluster, with a tag list
+     * @param instanceName should be unique in format: hostName_port
+     * @param tags
+     */
+    public void addInstance(String instanceName, List<String> tags) {
+        final String hostname = instanceName.substring(0, instanceName.lastIndexOf("_"));
+        final String port = instanceName.substring(instanceName.lastIndexOf("_") + 1);
+        InstanceConfig instanceConfig = new InstanceConfig(instanceName);
+        instanceConfig.setHostName(hostname);
+        instanceConfig.setPort(port);
+        if (tags != null) {
+            for (String tag : tags) {
+                instanceConfig.addTag(tag);
+            }
+        }
+
+        if (admin.getInstancesInCluster(clusterName).contains(instanceName)) {
+            admin.dropInstance(clusterName, instanceConfig);
+        }
+        admin.addInstance(clusterName, instanceConfig);
+    }
+
+    public static HelixClusterAdmin getInstance(KylinConfig kylinConfig) {
+        Preconditions.checkNotNull(kylinConfig);
+        instanceMaps.putIfAbsent(kylinConfig, new HelixClusterAdmin(kylinConfig));
+        return instanceMaps.get(kylinConfig);
+    }
+
+    public String getCurrentInstanceName() {
+        final String restAddress = kylinConfig.getRestAddress();
+        if (StringUtils.isEmpty(restAddress)) {
+            throw new RuntimeException("There is no kylin.rest.address set in System property and kylin.properties;");
+        }
+
+        final String hostname = Preconditions.checkNotNull(restAddress.substring(0, restAddress.lastIndexOf(":")), "failed to get HostName of this server");
+        final String port = Preconditions.checkNotNull(restAddress.substring(restAddress.lastIndexOf(":") + 1), "failed to get port of this server");
+        return hostname + "_" + port;
+    }
+
+    /**
+     * Listen to the cluster's event, update "kylin.rest.servers" to the live instances.
+     */
+    class KylinClusterLiveInstanceChangeListener implements LiveInstanceChangeListener {
+        @Override
+        public void onLiveInstanceChange(List<LiveInstance> liveInstances, NotificationContext changeContext) {
+            List<String> instanceRestAddresses = Lists.newArrayList();
+            for (LiveInstance liveInstance : liveInstances) {
+                String instanceName = liveInstance.getInstanceName();
+                int indexOfUnderscore = instanceName.lastIndexOf("_");
+                instanceRestAddresses.add(instanceName.substring(0, indexOfUnderscore) + ":" + instanceName.substring(indexOfUnderscore + 1));
+            }
+            String restServersInCluster = Strings.join(instanceRestAddresses, ",");
+            kylinConfig.setProperty("kylin.rest.servers", restServersInCluster);
+            System.setProperty("kylin.rest.servers", restServersInCluster);
+            logger.info("kylin.rest.servers update to " + restServersInCluster);
+            Broadcaster.clearCache();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
new file mode 100644
index 0000000..6694c81
--- /dev/null
+++ b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
@@ -0,0 +1,70 @@
+package org.apache.kylin.rest.helix;
+
+import org.apache.helix.NotificationContext;
+import org.apache.helix.api.StateTransitionHandlerFactory;
+import org.apache.helix.api.TransitionHandler;
+import org.apache.helix.api.id.PartitionId;
+import org.apache.helix.api.id.ResourceId;
+import org.apache.helix.model.Message;
+import org.apache.helix.participant.statemachine.Transition;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.lock.MockJobLock;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ */
+public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactory<TransitionHandler> {
+    private static final Logger logger = LoggerFactory.getLogger(LeaderStandbyStateModelFactory.class);
+    
+    @Override
+    public TransitionHandler createStateTransitionHandler(PartitionId partitionId) {
+        if (partitionId.getResourceId().equals(ResourceId.from(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE))) {
+            return new JobEngineStateModel();
+        }
+        
+        return null;
+    }
+
+    public static class JobEngineStateModel extends TransitionHandler {
+
+        @Transition(to = "LEADER", from = "STANDBY")
+        public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
+            logger.info("JobEngineStateModel.onBecomeLeaderFromStandby()");
+            try {
+                KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+                DefaultScheduler scheduler = DefaultScheduler.createInstance();
+                scheduler.init(new JobEngineConfig(kylinConfig), new MockJobLock());
+                while (!scheduler.hasStarted()) {
+                    logger.error("scheduler has not been started");
+                    Thread.sleep(1000);
+                }
+            } catch (Exception e) {
+                logger.error("error start DefaultScheduler", e);
+                throw new RuntimeException(e);
+            }
+        }
+
+        @Transition(to = "STANDBY", from = "LEADER")
+        public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
+            logger.info("JobEngineStateModel.onBecomeStandbyFromLeader()");
+            DefaultScheduler.destroyInstance();
+            
+        }
+
+        @Transition(to = "STANDBY", from = "OFFLINE")
+        public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
+            logger.info("JobEngineStateModel.onBecomeStandbyFromOffline()");
+
+        }
+
+
+        @Transition(to = "OFFLINE", from = "STANDBY")
+        public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
+            logger.info("JobEngineStateModel.onBecomeOfflineFromStandby()");
+
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 7916835..8ca4669 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -56,7 +56,7 @@ import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
-import org.apache.kylin.rest.helix.HelixJobEngineAdmin;
+import org.apache.kylin.rest.helix.HelixClusterAdmin;
 import org.apache.kylin.rest.request.MetricsRequest;
 import org.apache.kylin.rest.response.HBaseResponse;
 import org.apache.kylin.rest.response.MetricsResponse;
@@ -590,8 +590,8 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         logger.debug("on updateOnNewSegmentReady: " + cubeName);
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        HelixJobEngineAdmin jobEngineAdmin = HelixJobEngineAdmin.getInstance(kylinConfig.getZookeeperAddress());
-        boolean isLeaderRole = jobEngineAdmin.isLeaderRole(kylinConfig.getClusterName(), HelixJobEngineAdmin.getCurrentInstanceName());
+        HelixClusterAdmin jobEngineAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+        boolean isLeaderRole = jobEngineAdmin.isLeaderRole(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE);
         logger.debug("server is leader role ? " + isLeaderRole);
         if (isLeaderRole == true) {
             keepCubeRetention(cubeName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/96e9577b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
new file mode 100644
index 0000000..70525b3
--- /dev/null
+++ b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
@@ -0,0 +1,140 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+* 
+*     http://www.apache.org/licenses/LICENSE-2.0
+* 
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.kylin.rest.helix;
+
+import org.I0Itec.zkclient.IDefaultNameSpace;
+import org.I0Itec.zkclient.ZkClient;
+import org.I0Itec.zkclient.ZkServer;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.helix.manager.zk.ZKHelixAdmin;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.InputStream;
+
+import static org.apache.kylin.rest.helix.HelixClusterAdmin.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+*/
+public class HelixClusterAdminTest extends LocalFileMetadataTestCase {
+
+    String zkAddress = "localhost:2199";
+    ZkServer server;
+
+    HelixClusterAdmin clusterAdmin1;
+    HelixClusterAdmin clusterAdmin2;
+    KylinConfig kylinConfig;
+
+    private static final String CLUSTER_NAME = "test_cluster";
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+        // start zookeeper on localhost
+        final File tmpDir = new File("/tmp/helix-quickstart");
+        FileUtil.fullyDelete(tmpDir);
+        tmpDir.mkdirs();
+        server = new ZkServer("/tmp/helix-quickstart/dataDir", "/tmp/helix-quickstart/logDir", new IDefaultNameSpace() {
+            @Override
+            public void createDefaultNameSpace(ZkClient zkClient) {
+            }
+        }, 2199);
+        server.start();
+
+        kylinConfig = this.getTestConfig();
+        kylinConfig.setRestAddress("localhost:7070");
+        kylinConfig.setZookeeperAddress(zkAddress);
+        kylinConfig.setClusterName(CLUSTER_NAME);
+        
+        final ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkAddress);
+        zkHelixAdmin.dropCluster(kylinConfig.getClusterName());
+
+    }
+
+    @Test
+    public void test() throws Exception {
+        
+        // 1. start one instance
+        clusterAdmin1 = getInstance(kylinConfig);
+        clusterAdmin1.start();
+
+        Thread.sleep(1000);
+        assertTrue(clusterAdmin1.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertEquals(1, kylinConfig.getRestServers().length);
+        assertEquals("localhost:7070", kylinConfig.getRestServers()[0]);
+        
+        // 2. start second instance
+        InputStream is = IOUtils.toInputStream(kylinConfig.getConfigAsString());
+        KylinConfig kylinConfig2 = KylinConfig.getKylinConfigFromInputStream(is);
+        kylinConfig2.setRestAddress("localhost:7072");
+        is.close();
+
+
+        clusterAdmin2 = getInstance(kylinConfig2);
+        clusterAdmin2.start();
+
+        Thread.sleep(1000);
+        assertTrue(clusterAdmin1.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertFalse(clusterAdmin2.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertEquals(2, kylinConfig.getRestServers().length);
+        assertEquals("localhost:7070", kylinConfig.getRestServers()[0]);
+        assertEquals("localhost:7072", kylinConfig.getRestServers()[1]);
+        
+        // 3. shutdown the first instance
+        clusterAdmin1.stop();
+        clusterAdmin1 = null;
+        Thread.sleep(1000);
+        assertTrue(clusterAdmin2.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertEquals(1, kylinConfig.getRestServers().length);
+        assertEquals("localhost:7072", kylinConfig.getRestServers()[0]);
+        
+        // 4. recover first instance
+        clusterAdmin1 = getInstance(kylinConfig);
+        clusterAdmin1.start();
+
+        Thread.sleep(1000);
+        assertTrue(clusterAdmin1.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertFalse(clusterAdmin2.isLeaderRole(RESOURCE_NAME_JOB_ENGINE));
+        assertEquals(2, kylinConfig.getRestServers().length);
+        assertEquals("localhost:7070", kylinConfig.getRestServers()[0]);
+        assertEquals("localhost:7072", kylinConfig.getRestServers()[1]);
+    }
+
+    @After
+    public void tearDown() {
+        if (clusterAdmin1 != null) {
+            clusterAdmin1.stop();
+        }
+
+        if (clusterAdmin2 != null) {
+            clusterAdmin2.stop();
+        }
+        
+        server.shutdown();
+        cleanupTestMetadata();
+    }
+
+}


[24/43] kylin git commit: KYLIN-1387 Streaming cubing doesn't generate cuboids files on HDFS, cause cube merge failure

Posted by sh...@apache.org.
KYLIN-1387 Streaming cubing doesn't generate cuboids files on HDFS, cause cube merge failure


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d0449451
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d0449451
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d0449451

Branch: refs/heads/helix-rebase
Commit: d0449451ff7be5aa8875973541a6483262a5db70
Parents: ebef971
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 2 17:34:46 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../cube/inmemcubing/CompoundCuboidWriter.java  | 57 ++++++++++++++
 .../kylin/cube/inmemcubing/ICuboidWriter.java   |  4 +-
 .../kylin/job/constant/ExecutableConstants.java |  1 +
 .../kylin/engine/mr/steps/KVGTRecordWriter.java | 81 ++++++++++++++++++++
 .../mr/steps/MapContextGTRecordWriter.java      | 69 ++---------------
 .../streaming/cube/StreamingCubeBuilder.java    | 12 ++-
 .../storage/hbase/steps/HBaseCuboidWriter.java  | 24 +++---
 .../hbase/steps/HBaseMROutput2Transition.java   |  2 +-
 .../kylin/storage/hbase/steps/HBaseMRSteps.java |  2 +-
 .../hbase/steps/HBaseStreamingOutput.java       |  8 +-
 .../hbase/steps/SequenceFileCuboidWriter.java   | 75 ++++++++++++++++++
 11 files changed, 254 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
new file mode 100644
index 0000000..46eef50
--- /dev/null
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/CompoundCuboidWriter.java
@@ -0,0 +1,57 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements. See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License. You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.kylin.cube.inmemcubing;
+
+import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
+
+/**
+ */
+public class CompoundCuboidWriter implements ICuboidWriter {
+
+    private Iterable<ICuboidWriter> cuboidWriters;
+
+    public CompoundCuboidWriter(Iterable<ICuboidWriter> cuboidWriters) {
+        this.cuboidWriters = cuboidWriters;
+
+    }
+
+    @Override
+    public void write(long cuboidId, GTRecord record) throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.write(cuboidId, record);
+        }
+    }
+
+    @Override
+    public void flush() throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.flush();
+        }
+
+    }
+
+    @Override
+    public void close() throws IOException {
+        for (ICuboidWriter writer : cuboidWriters) {
+            writer.close();
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
index 9e26e5e..e6cfa02 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ICuboidWriter.java
@@ -27,7 +27,7 @@ public interface ICuboidWriter {
 
     void write(long cuboidId, GTRecord record) throws IOException;
 
-    void flush();
+    void flush() throws IOException;
     
-    void close();
+    void close() throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
index ba50880..d370b0d 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
@@ -56,6 +56,7 @@ public final class ExecutableConstants {
     public static final String STEP_NAME_MERGE_CUBOID = "Merge Cuboid Data";
     public static final String STEP_NAME_UPDATE_CUBE_INFO = "Update Cube Info";
     public static final String STEP_NAME_GARBAGE_COLLECTION = "Garbage Collection";
+    public static final String STEP_NAME_GARBAGE_COLLECTION_HDFS = "Garbage Collection on HDFS";
 
     public static final String STEP_NAME_BUILD_II = "Build Inverted Index";
     public static final String STEP_NAME_CONVERT_II_TO_HFILE = "Convert Inverted Index Data to HFile";

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
new file mode 100644
index 0000000..e201705
--- /dev/null
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/KVGTRecordWriter.java
@@ -0,0 +1,81 @@
+package org.apache.kylin.engine.mr.steps;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.cuboid.Cuboid;
+import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
+import org.apache.kylin.cube.kv.AbstractRowKeyEncoder;
+import org.apache.kylin.cube.kv.RowConstants;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.mr.ByteArrayWritable;
+import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ */
+public abstract class KVGTRecordWriter implements ICuboidWriter {
+
+    private static final Log logger = LogFactory.getLog(KVGTRecordWriter.class);
+    private Long lastCuboidId;
+    protected CubeSegment cubeSegment;
+    protected CubeDesc cubeDesc;
+
+    private AbstractRowKeyEncoder rowKeyEncoder;
+    private int dimensions;
+    private int measureCount;
+    private byte[] keyBuf;
+    private int[] measureColumnsIndex;
+    private ByteBuffer valueBuf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
+    private ByteArrayWritable outputKey = new ByteArrayWritable();
+    private ByteArrayWritable outputValue = new ByteArrayWritable();
+    private long cuboidRowCount = 0;
+
+    //for shard
+
+    public KVGTRecordWriter(CubeDesc cubeDesc, CubeSegment cubeSegment) {
+        this.cubeDesc = cubeDesc;
+        this.cubeSegment = cubeSegment;
+        this.measureCount = cubeDesc.getMeasures().size();
+    }
+
+    @Override
+    public void write(long cuboidId, GTRecord record) throws IOException {
+
+        if (lastCuboidId == null || !lastCuboidId.equals(cuboidId)) {
+            if (lastCuboidId != null) {
+                logger.info("Cuboid " + lastCuboidId + " has " + cuboidRowCount + " rows");
+                cuboidRowCount = 0;
+            }
+            // output another cuboid
+            initVariables(cuboidId);
+            lastCuboidId = cuboidId;
+        }
+
+        cuboidRowCount++;
+        rowKeyEncoder.encode(record, record.getInfo().getPrimaryKey(), keyBuf);
+
+        //output measures
+        valueBuf.clear();
+        record.exportColumns(measureColumnsIndex, valueBuf);
+
+        outputKey.set(keyBuf, 0, keyBuf.length);
+        outputValue.set(valueBuf.array(), 0, valueBuf.position());
+        writeAsKeyValue(outputKey, outputValue);
+    }
+
+    protected abstract void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException;
+
+    private void initVariables(Long cuboidId) {
+        rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, Cuboid.findById(cubeDesc, cuboidId));
+        keyBuf = rowKeyEncoder.createBuf();
+
+        dimensions = Long.bitCount(cuboidId);
+        measureColumnsIndex = new int[measureCount];
+        for (int i = 0; i < measureCount; i++) {
+            measureColumnsIndex[i] = dimensions + i;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
index 8416d95..bee152b 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MapContextGTRecordWriter.java
@@ -1,76 +1,32 @@
 package org.apache.kylin.engine.mr.steps;
 
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.BitSet;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.MapContext;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.cuboid.Cuboid;
-import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
-import org.apache.kylin.cube.kv.AbstractRowKeyEncoder;
-import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.engine.mr.ByteArrayWritable;
-import org.apache.kylin.gridtable.GTRecord;
+
+import java.io.IOException;
 
 /**
  */
-public class MapContextGTRecordWriter implements ICuboidWriter {
+public class MapContextGTRecordWriter extends KVGTRecordWriter {
 
     private static final Log logger = LogFactory.getLog(MapContextGTRecordWriter.class);
     protected MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext;
-    private Long lastCuboidId;
-    protected CubeSegment cubeSegment;
-    protected CubeDesc cubeDesc;
-
-    private AbstractRowKeyEncoder rowKeyEncoder;
-    private int dimensions;
-    private int measureCount;
-    private byte[] keyBuf;
-    private int[] measureColumnsIndex;
-    private ByteBuffer valueBuf = ByteBuffer.allocate(RowConstants.ROWVALUE_BUFFER_SIZE);
-    private ByteArrayWritable outputKey = new ByteArrayWritable();
-    private ByteArrayWritable outputValue = new ByteArrayWritable();
-    private long cuboidRowCount = 0;
-
-    //for shard
 
     public MapContextGTRecordWriter(MapContext<?, ?, ByteArrayWritable, ByteArrayWritable> mapContext, CubeDesc cubeDesc, CubeSegment cubeSegment) {
+        super(cubeDesc, cubeSegment);
         this.mapContext = mapContext;
-        this.cubeDesc = cubeDesc;
-        this.cubeSegment = cubeSegment;
-        this.measureCount = cubeDesc.getMeasures().size();
     }
 
     @Override
-    public void write(long cuboidId, GTRecord record) throws IOException {
-
-        if (lastCuboidId == null || !lastCuboidId.equals(cuboidId)) {
-            if (lastCuboidId != null) {
-                logger.info("Cuboid " + lastCuboidId + " has " + cuboidRowCount + " rows");
-                cuboidRowCount = 0;
-            }
-            // output another cuboid
-            initVariables(cuboidId);
-            lastCuboidId = cuboidId;
-        }
-
-        cuboidRowCount++;
-        rowKeyEncoder.encode(record, record.getInfo().getPrimaryKey(), keyBuf);
-
-        //output measures
-        valueBuf.clear();
-        record.exportColumns(measureColumnsIndex, valueBuf);
-
-        outputKey.set(keyBuf, 0, keyBuf.length);
-        outputValue.set(valueBuf.array(), 0, valueBuf.position());
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
         try {
-            mapContext.write(outputKey, outputValue);
+            mapContext.write(key, value);
         } catch (InterruptedException e) {
-            throw new RuntimeException(e);
+            throw new IOException(e);
         }
     }
 
@@ -83,15 +39,4 @@ public class MapContextGTRecordWriter implements ICuboidWriter {
     public void close() {
 
     }
-
-    private void initVariables(Long cuboidId) {
-        rowKeyEncoder = AbstractRowKeyEncoder.createInstance(cubeSegment, Cuboid.findById(cubeDesc, cuboidId));
-        keyBuf = rowKeyEncoder.createBuf();
-
-        dimensions = Long.bitCount(cuboidId);
-        measureColumnsIndex = new int[measureCount];
-        for (int i = 0; i < measureCount; i++) {
-            measureColumnsIndex[i] = dimensions + i;
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index d7056cf..20ff01d 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -98,6 +98,14 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
             throw new RuntimeException(e);
         } catch (ExecutionException e) {
             throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+        } catch (IOException e) {
+            throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+        } finally {
+            try {
+                cuboidWriter.close();
+            } catch (IOException e) {
+                throw new RuntimeException("error build cube from StreamingBatch", e.getCause());
+            }
         }
     }
 
@@ -106,7 +114,9 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
         CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         final CubeInstance cubeInstance = cubeManager.reloadCubeLocal(cubeName);
         try {
-            return cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
+            CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
+            segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
+            return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index c4dc0b5..ddc868d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -33,9 +33,8 @@
  */
 package org.apache.kylin.storage.hbase.steps;
 
-import java.io.IOException;
-import java.util.List;
-
+import com.google.common.collect.Lists;
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
@@ -51,13 +50,14 @@ import org.apache.kylin.gridtable.GTRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.List;
 
 /**
  */
-public final class HBaseCuboidWriter implements ICuboidWriter {
+public class HBaseCuboidWriter implements ICuboidWriter {
 
-    private static final Logger logger = LoggerFactory.getLogger(HBaseStreamingOutput.class);
+    private static final Logger logger = LoggerFactory.getLogger(HBaseCuboidWriter.class);
 
     private static final int BATCH_PUT_THRESHOLD = 10000;
 
@@ -125,8 +125,8 @@ public final class HBaseCuboidWriter implements ICuboidWriter {
         }
     }
 
-    public final void flush() {
-        try {
+    @Override
+    public final void flush() throws IOException {
             if (!puts.isEmpty()) {
                 long t = System.currentTimeMillis();
                 if (hTable != null) {
@@ -136,14 +136,12 @@ public final class HBaseCuboidWriter implements ICuboidWriter {
                 logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
                 puts.clear();
             }
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
     }
 
     @Override
-    public void close() {
-
+    public void close() throws IOException {
+        flush();
+        IOUtils.closeQuietly(hTable);
     }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
index 4c2737d..7bb3647 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput2Transition.java
@@ -80,7 +80,7 @@ public class HBaseMROutput2Transition implements IMROutput2 {
 
             @Override
             public void addStepPhase3_Cleanup(DefaultChainedExecutable jobFlow) {
-                jobFlow.addTask(steps.createMergeGCStep());
+                steps.addMergingGarbageCollectionSteps(jobFlow);
             }
         };
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index 2a21640..a828728 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -161,7 +161,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         toDeletePaths.addAll(getMergingHDFSPaths());
 
         HDFSPathGarbageCollectionStep step = new HDFSPathGarbageCollectionStep();
-        step.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION);
+        step.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HDFS);
         step.setDeletePaths(toDeletePaths);
         step.setJobId(jobId);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
index 770be3c..4cc4794 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
@@ -18,9 +18,11 @@
 package org.apache.kylin.storage.hbase.steps;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -31,6 +33,7 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.measure.hllc.HyperLogLogPlusCounter;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.inmemcubing.CompoundCuboidWriter;
 import org.apache.kylin.cube.inmemcubing.ICuboidWriter;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -54,7 +57,10 @@ public class HBaseStreamingOutput implements IStreamingOutput {
 
             final HTableInterface hTable;
             hTable = createHTable(cubeSegment);
-            return new HBaseCuboidWriter(cubeSegment, hTable);
+            List<ICuboidWriter> cuboidWriters = Lists.newArrayList();
+            cuboidWriters.add(new HBaseCuboidWriter(cubeSegment, hTable));
+            cuboidWriters.add(new SequenceFileCuboidWriter(cubeSegment.getCubeDesc(), cubeSegment));
+            return new CompoundCuboidWriter(cuboidWriters);
         } catch (IOException e) {
             throw new RuntimeException("failed to get ICuboidWriter", e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/d0449451/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
new file mode 100644
index 0000000..4d76522
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -0,0 +1,75 @@
+package org.apache.kylin.storage.hbase.steps;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.cube.CubeSegment;
+import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.engine.mr.ByteArrayWritable;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.JobBuilderSupport;
+import org.apache.kylin.engine.mr.steps.KVGTRecordWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ */
+public class SequenceFileCuboidWriter extends KVGTRecordWriter {
+
+    private static final Logger logger = LoggerFactory.getLogger(SequenceFileCuboidWriter.class);
+    private SequenceFile.Writer writer = null;
+
+    public SequenceFileCuboidWriter(CubeDesc cubeDesc, CubeSegment segment) {
+        super(cubeDesc, segment);
+    }
+
+
+    @Override
+    protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
+        if (writer == null) {
+            synchronized (SequenceFileCuboidWriter.class) {
+                if (writer == null) {
+                    JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(cubeSegment, "SYSTEM");
+                    String cuboidRoot = jobBuilderSupport.getCuboidRootPath(cubeSegment);
+                    Path cuboidPath = new Path(cuboidRoot);
+                    FileSystem fs = HadoopUtil.getFileSystem(cuboidRoot);
+                    try {
+                        if (fs.exists(cuboidPath)) {
+                            fs.delete(cuboidPath, true);
+                        }
+
+                        fs.mkdirs(cuboidPath);
+                    } finally {
+                        IOUtils.closeQuietly(fs);
+                    }
+
+                    Path cuboidFile = new Path(cuboidPath, "data.seq");
+                    logger.debug("Cuboid is written to " + cuboidFile);
+                    writer = SequenceFile.createWriter(HadoopUtil.getCurrentConfiguration(), SequenceFile.Writer.file(cuboidFile), SequenceFile.Writer.keyClass(Text.class), SequenceFile.Writer.valueClass(Text.class));
+                }
+            }
+        }
+
+        Text outputValue = new Text();
+        Text outputKey = new Text();
+        outputKey.set(key.array(), key.offset(), key.length());
+        outputValue.set(value.array(), value.offset(), value.length());
+        writer.append(outputKey, outputValue);
+    }
+
+    @Override
+    public void flush() throws IOException {
+        if (writer != null) {
+            writer.hflush();
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        IOUtils.closeQuietly(writer);
+    }
+}


[43/43] kylin git commit: KYLIN-1420 enhance and update test case

Posted by sh...@apache.org.
KYLIN-1420 enhance and update test case


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/542f9a23
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/542f9a23
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/542f9a23

Branch: refs/heads/helix-rebase
Commit: 542f9a230827adcb6b800ca91a4f14fa0abb6722
Parents: 5c2c64f
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 15 18:06:18 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../kylin/gridtable/GTScanRangePlanner.java     | 27 ++++++++------------
 .../kylin/gridtable/DictGridTableTest.java      |  2 +-
 2 files changed, 11 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/542f9a23/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
index d314dde..559a245 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
@@ -1,17 +1,8 @@
 package org.apache.kylin.gridtable;
 
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.ImmutableBitSet;
@@ -26,9 +17,7 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import java.util.*;
 
 public class GTScanRangePlanner {
 
@@ -108,8 +97,10 @@ public class GTScanRangePlanner {
         for (ColumnRange range : andDimRanges) {
             if (partitionColRef != null && range.column.equals(partitionColRef)) {
                 if (rangeStartEndComparator.comparator.compare(segmentStartAndEnd.getFirst(), range.end) <= 0 //
-                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) <= 0) {
-                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <=. 
+                        && (rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) < 0 //
+                        || rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) == 0 //
+                        && (range.op == FilterOperatorEnum.EQ || range.op == FilterOperatorEnum.LTE || range.op == FilterOperatorEnum.GTE || range.op == FilterOperatorEnum.IN))) {
+                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <= when has equals in condition. 
                 } else {
                     logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}",//
                             new Object[] { partitionColRef, makeReadable(segmentStartAndEnd.getFirst()), makeReadable(segmentStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end) });
@@ -346,9 +337,11 @@ public class GTScanRangePlanner {
         private ByteArray begin = ByteArray.EMPTY;
         private ByteArray end = ByteArray.EMPTY;
         private Set<ByteArray> valueSet;
+        private FilterOperatorEnum op;
 
         public ColumnRange(TblColRef column, Set<ByteArray> values, FilterOperatorEnum op) {
             this.column = column;
+            this.op = op;
 
             switch (op) {
             case EQ:

http://git-wip-us.apache.org/repos/asf/kylin/blob/542f9a23/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
index df69c17..674aa15 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/DictGridTableTest.java
@@ -118,7 +118,7 @@ public class DictGridTableTest {
         {
             LogicalTupleFilter filter = and(timeComp4, ageComp1);
             List<GTScanRange> r = planner.planScanRanges(filter);
-            assertEquals(0, r.size());
+            assertEquals(1, r.size());
         }
         {
             LogicalTupleFilter filter = and(timeComp5, ageComp1);


[03/43] kylin git commit: correct hierarchyMasks building in AggregationGroup

Posted by sh...@apache.org.
correct hierarchyMasks building in AggregationGroup


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0f48f10b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0f48f10b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0f48f10b

Branch: refs/heads/helix-rebase
Commit: 0f48f10bc1347c89c01cd28227bec15714601a63
Parents: ab4d890
Author: sunyerui <su...@gmail.com>
Authored: Sun Feb 28 21:36:17 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Sun Feb 28 21:36:17 2016 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/cube/model/AggregationGroup.java   | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0f48f10b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
index 905f8dc..35f85b0 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
@@ -129,8 +129,6 @@ public class AggregationGroup {
     }
 
     private void buildHierarchyMasks(Map<String, TblColRef> colNameAbbr, RowKeyDesc rowKeyDesc) {
-
-        HierarchyMask mask = new HierarchyMask();
         this.hierarchyMasks = new ArrayList<HierarchyMask>();
 
         if (this.selectRule.hierarchy_dims == null || this.selectRule.hierarchy_dims.length == 0) {
@@ -138,6 +136,7 @@ public class AggregationGroup {
         }
 
         for (String[] hierarchy_dims : this.selectRule.hierarchy_dims) {
+            HierarchyMask mask = new HierarchyMask();
             if (hierarchy_dims == null || hierarchy_dims.length == 0) {
                 continue;
             }


[17/43] kylin git commit: KYLIN-1383 remove deploy.env from front end, for permission control only depend on acl.defaultRole and acl.adminRole in kylin.properties

Posted by sh...@apache.org.
KYLIN-1383 remove deploy.env from front end, for permission control only depend on acl.defaultRole and acl.adminRole in kylin.properties


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/2d4922dc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/2d4922dc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/2d4922dc

Branch: refs/heads/helix-rebase
Commit: 2d4922dc6a55aa9376d424f1fce5454f7151e3de
Parents: c4d94f7
Author: Jason <ji...@163.com>
Authored: Thu Mar 3 14:24:38 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Thu Mar 3 14:25:32 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/services/kylinProperties.js  | 1 +
 webapp/app/partials/jobs/jobs.html         | 2 +-
 webapp/app/partials/models/models.html     | 2 +-
 webapp/app/partials/projects/projects.html | 2 +-
 webapp/app/partials/query/query.html       | 2 +-
 5 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/js/services/kylinProperties.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
index 546db2b..68e8766 100644
--- a/webapp/app/js/services/kylinProperties.js
+++ b/webapp/app/js/services/kylinProperties.js
@@ -55,6 +55,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
     return false;
   }
 
+  //deprecated
   this.getDeployEnv = function () {
     this.deployEnv = this.getProperty("deploy.env");
     if (!this.deployEnv) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/jobs/jobs.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/jobs/jobs.html b/webapp/app/partials/jobs/jobs.html
index daf4578..cc5840b 100644
--- a/webapp/app/partials/jobs/jobs.html
+++ b/webapp/app/partials/jobs/jobs.html
@@ -22,7 +22,7 @@
         <form ng-if="userService.isAuthorized()">
             <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')" >
                 <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-              <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+              <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                     <i class="fa fa-plus"></i>
                 </a>
             </div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/models/models.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/models/models.html b/webapp/app/partials/models/models.html
index 88cc6f3..c8e6ed1 100644
--- a/webapp/app/partials/models/models.html
+++ b/webapp/app/partials/models/models.html
@@ -21,7 +21,7 @@
     <form class="navbar-form navbar-left" style="margin-top: 0px !important;" ng-if="userService.isAuthorized()">
         <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')" >
             <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                 <i class="fa fa-plus"></i>
             </a>
         </div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/projects/projects.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/projects/projects.html b/webapp/app/partials/projects/projects.html
index 26a2037..96e4a91 100644
--- a/webapp/app/partials/projects/projects.html
+++ b/webapp/app/partials/projects/projects.html
@@ -17,7 +17,7 @@
 -->
 
 <div class="page-header">
-    <button class="btn btn-primary btn-sm" ng-if="userService.hasRole('ROLE_ADMIN')||userService.hasRole('ROLE_MODELER')&&kylinConfig.getDeployEnv()!=='PROD'" ng-click="toCreateProj()"><i class="fa fa-plus"></i> Project</button>
+    <button class="btn btn-primary btn-sm" ng-if="userService.hasRole('ROLE_MODELER')" ng-click="toCreateProj()"><i class="fa fa-plus"></i> Project</button>
 </div>
 
 <div ng-if="!loading && projects.length == 0">

http://git-wip-us.apache.org/repos/asf/kylin/blob/2d4922dc/webapp/app/partials/query/query.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/query/query.html b/webapp/app/partials/query/query.html
index 0af08d6..69fe649 100644
--- a/webapp/app/partials/query/query.html
+++ b/webapp/app/partials/query/query.html
@@ -21,7 +21,7 @@
     <form class="navbar-form navbar-left" style="margin-top: 0px !important;" ng-if="userService.isAuthorized()">
         <div class="form-group" ng-if="userService.hasRole('ROLE_MODELER')">
             <a class="btn btn-xs btn-info" href="projects" tooltip="Manage Project"><i class="fa fa-gears"></i></a>
-          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_ADMIN')||kylinConfig.getDeployEnv()!=='PROD'" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
+          <a class="btn btn-xs btn-primary" ng-if="userService.hasRole('ROLE_MODELER')" style="width: 29px" tooltip="Add Project" ng-click="toCreateProj()">
                 <i class="fa fa-plus"></i>
             </a>
         </div>


[13/43] kylin git commit: minor, remove unused files

Posted by sh...@apache.org.
minor, remove unused files


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/1ea781f0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/1ea781f0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/1ea781f0

Branch: refs/heads/helix-rebase
Commit: 1ea781f0005c1e20c0e775e660e47a6964f07bcb
Parents: 3fb67ca
Author: lidongsjtu <li...@apache.org>
Authored: Wed Mar 2 17:34:59 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Wed Mar 2 17:34:59 2016 +0800

----------------------------------------------------------------------
 ...port-load-hive-table-from-listed-tree-.patch | 864 -------------------
 1 file changed, 864 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/1ea781f0/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
----------------------------------------------------------------------
diff --git a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
deleted file mode 100644
index 31cc017..0000000
--- a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
+++ /dev/null
@@ -1,864 +0,0 @@
-From 1a79ef1aec557259f9611f5b3199c2e90400be77 Mon Sep 17 00:00:00 2001
-From: Jason <ji...@163.com>
-Date: Wed, 2 Mar 2016 14:40:19 +0800
-Subject: [PATCH] KYLIN-1074 support load hive table from listed tree, patch
- from @nichunen
-
----
- build/conf/kylin.properties                        |   2 +
- examples/test_case_data/sandbox/kylin.properties   |   1 +
- pom.xml                                            |   2 +
- .../kylin/rest/controller/TableController.java     |  44 +++
- .../org/apache/kylin/source/hive/HiveClient.java   |   8 +
- webapp/app/index.html                              |   1 +
- webapp/app/js/controllers/sourceMeta.js            | 186 ++++++++++-
- webapp/app/js/directives/angular-tree-control.js   | 363 +++++++++++++++++++++
- webapp/app/js/services/kylinProperties.js          |  15 +-
- webapp/app/js/services/tables.js                   |   7 +-
- webapp/app/partials/tables/source_table_tree.html  |  26 ++
- webapp/bower.json                                  |   3 +-
- webapp/grunt.json                                  |   1 -
- 13 files changed, 649 insertions(+), 10 deletions(-)
- create mode 100644 webapp/app/js/directives/angular-tree-control.js
-
-diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
-index a4b8c3b..e8add7c 100644
---- a/build/conf/kylin.properties
-+++ b/build/conf/kylin.properties
-@@ -158,3 +158,5 @@ deploy.env=DEV
- 
- ###########################deprecated configs#######################
- kylin.sandbox=true
-+
-+kylin.web.hive.limit=20
-\ No newline at end of file
-diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
-index 9451b78..1a74b80 100644
---- a/examples/test_case_data/sandbox/kylin.properties
-+++ b/examples/test_case_data/sandbox/kylin.properties
-@@ -131,3 +131,4 @@ kylin.web.contact_mail=
- deploy.env=DEV
- 
- 
-+kylin.web.hive.limit=20
-\ No newline at end of file
-diff --git a/pom.xml b/pom.xml
-index 9d9a54b..537693f 100644
---- a/pom.xml
-+++ b/pom.xml
-@@ -774,6 +774,8 @@
-                                 <!-- MIT license -->
-                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
-                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
-+                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
-+
- 
-                                 <!--configuration file -->
-                                 <exclude>webapp/app/routes.json</exclude>
-diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-index 39af7db..ea5fdd4 100644
---- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-+++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
-@@ -33,6 +33,7 @@ import org.apache.kylin.rest.request.CardinalityRequest;
- import org.apache.kylin.rest.request.StreamingRequest;
- import org.apache.kylin.rest.response.TableDescResponse;
- import org.apache.kylin.rest.service.CubeService;
-+import org.apache.kylin.source.hive.HiveClient;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
- import org.springframework.beans.factory.annotation.Autowired;
-@@ -205,6 +206,49 @@ public class TableController extends BasicController {
-         return descs;
-     }
- 
-+    /**
-+     * Show all databases in Hive
-+     *
-+     * @return Hive databases list
-+     * @throws IOException
-+     */
-+    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
-+    @ResponseBody
-+    private static List<String> showHiveDatabases() throws IOException {
-+        HiveClient hiveClient = new HiveClient();
-+        List<String> results = null;
-+
-+        try {
-+            results = hiveClient.getHiveDbNames();
-+        } catch (Exception e) {
-+            e.printStackTrace();
-+            throw new IOException(e);
-+        }
-+        return results;
-+    }
-+
-+    /**
-+     * Show all tables in a Hive database
-+     *
-+     * @return Hive table list
-+     * @throws IOException
-+     */
-+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
-+    @ResponseBody
-+    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
-+        HiveClient hiveClient = new HiveClient();
-+        List<String> results = null;
-+
-+        try {
-+            results = hiveClient.getHiveTableNames(database);
-+        } catch (Exception e) {
-+            e.printStackTrace();
-+            throw new IOException(e);
-+        }
-+        return results;
-+    }
-+
-+
-     public void setCubeService(CubeService cubeService) {
-         this.cubeMgmtService = cubeService;
-     }
-diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-index 178889e..a99b304 100644
---- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
-@@ -132,6 +132,14 @@ public class HiveClient {
-         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
-     }
- 
-+    public List<String> getHiveDbNames() throws Exception {
-+        return getMetaStoreClient().getAllDatabases();
-+    }
-+
-+    public List<String> getHiveTableNames(String database) throws Exception {
-+        return getMetaStoreClient().getAllTables(database);
-+    }
-+
-     /**
-      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
-      * 
-diff --git a/webapp/app/index.html b/webapp/app/index.html
-index 11ca283..b4eb9d7 100644
---- a/webapp/app/index.html
-+++ b/webapp/app/index.html
-@@ -113,6 +113,7 @@
- <script src="js/filters/filter.js"></script>
- <script src="js/directives/directives.js"></script>
- <script src="js/directives/kylin_abn_tree_directive.js"></script>
-+<script src="js/directives/angular-tree-control.js"></script>
- <script src="js/factories/graph.js"></script>
- <script src="js/services/cache.js"></script>
- <script src="js/services/message.js"></script>
-diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
-index abdeeb8..c87d6ef 100755
---- a/webapp/app/js/controllers/sourceMeta.js
-+++ b/webapp/app/js/controllers/sourceMeta.js
-@@ -19,14 +19,14 @@
- 'use strict';
- 
- KylinApp
--  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig) {
-+  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig,kylinConfig) {
-     var $httpDefaultCache = $cacheFactory.get('$http');
-     $scope.tableModel = TableModel;
-     $scope.tableModel.selectedSrcDb = [];
-     $scope.tableModel.selectedSrcTable = {};
-     $scope.window = 0.68 * $window.innerHeight;
-     $scope.tableConfig = tableConfig;
--
-+    $scope.kylinConfig = kylinConfig;
- 
-     $scope.state = {
-       filterAttr: 'id', filterReverse: false, reverseColumn: 'id',
-@@ -100,13 +100,193 @@ KylinApp
-       });
-     };
- 
--    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
-+    $scope.openTreeModal = function () {
-+      $modal.open({
-+        templateUrl: 'addHiveTableFromTree.html',
-+        controller: ModalInstanceCtrl,
-+        resolve: {
-+          tableNames: function () {
-+            return $scope.tableNames;
-+          },
-+          projectName:function(){
-+            return  $scope.projectModel.selectedProject;
-+          },
-+          scope: function () {
-+            return $scope;
-+          }
-+        }
-+      });
-+    };
-+
-+    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope,kylinConfig) {
-       $scope.tableNames = "";
-       $scope.projectName = projectName;
-       $scope.cancel = function () {
-         $modalInstance.dismiss('cancel');
-       };
-+
-+      $scope.kylinConfig = kylinConfig;
-+
-+
-+      $scope.treeOptions = {multiSelection: true};
-+      $scope.selectedNodes = [];
-+      $scope.hiveLimit =  kylinConfig.getHiveLimit();
-+
-+      $scope.loadHive = function () {
-+        if($scope.hiveLoaded)
-+          return;
-+        TableService.showHiveDatabases({}, function (databases) {
-+          $scope.dbNum = databases.length;
-+          if (databases.length > 0) {
-+            $scope.hiveMap = {};
-+            for (var i = 0; i < databases.length; i++) {
-+              var dbName = databases[i];
-+              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
-+              $scope.hive.push(hiveData);
-+              $scope.hiveMap[dbName] = i;
-+            }
-+          }
-+          $scope.hiveLoaded = true;
-+          $scope.showMoreDatabases();
-+        });
-+      }
-+
-+      $scope.showMoreTables = function(hiveTables, node){
-+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = 0;
-+        var hasMore = false;
-+        if(from + $scope.hiveLimit > hiveTables.length) {
-+          to = hiveTables.length - 1;
-+        } else {
-+          to = from + $scope.hiveLimit - 1;
-+          hasMore = true;
-+        }
-+        if(!angular.isUndefined(node.children[from])){
-+          node.children.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
-+        }
-+
-+        if(hasMore){
-+          var loading = {"label":"","id":65535,"children":[]};
-+          node.children.push(loading);
-+        }
-+      }
-+
-+      $scope.showAllTables = function(hiveTables, node){
-+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = hiveTables.length - 1;
-+        if(!angular.isUndefined(node.children[from])){
-+          node.children.pop();
-+        }
-+        for(var idx = from; idx <= to; idx++){
-+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
-+        }
-+      }
-+
-+      $scope.showMoreDatabases = function(){
-+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = 0;
-+        var hasMore = false;
-+        if(from + $scope.hiveLimit > $scope.hive.length) {
-+          to = $scope.hive.length - 1;
-+        } else {
-+          to = from + $scope.hiveLimit - 1;
-+          hasMore = true;
-+        }
-+        if(!angular.isUndefined($scope.treedata[from])){
-+          $scope.treedata.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          var children = [];
-+          var loading = {"label":"","id":0,"children":[]};
-+          children.push(loading);
-+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
-+        }
-+
-+        if(hasMore){
-+          var loading = {"label":"","id":65535,"children":[0]};
-+          $scope.treedata.push(loading);
-+        }
-+      }
-+
-+      $scope.showAllDatabases = function(){
-+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
-+        var from = $scope.hiveLimit * shownTimes;
-+        var to = $scope.hive.length - 1;
-+
-+        if(!angular.isUndefined($scope.treedata[from])){
-+          $scope.treedata.pop();
-+        }
-+
-+        for(var idx = from; idx <= to; idx++){
-+          var children = [];
-+          var loading = {"label":"","id":0,"children":[]};
-+          children.push(loading);
-+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
-+        }
-+      }
-+
-+      $scope.showMoreClicked = function($parentNode){
-+        if($parentNode == null){
-+          $scope.showMoreDatabases();
-+        } else {
-+          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
-+        }
-+      }
-+
-+      $scope.showAllClicked = function($parentNode){
-+        if($parentNode == null){
-+          $scope.showAllDatabases();
-+        } else {
-+          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
-+        }
-+      }
-+
-+      $scope.showToggle = function(node) {
-+        if(node.expanded == false){
-+          TableService.showHiveTables({"database": node.label},function (hive_tables){
-+            var tables = [];
-+            for (var i = 0; i < hive_tables.length; i++) {
-+              tables.push(hive_tables[i]);
-+            }
-+            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
-+            $scope.showMoreTables(tables,node);
-+            node.expanded = true;
-+          });
-+        }
-+      }
-+
-+      $scope.showSelected = function(node) {
-+
-+      }
-+
-+      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
-+        $scope.hive = [];
-+        $scope.hiveLoaded = false;
-+        $scope.treedata = [];
-+        $scope.loadHive();
-+      }
-+
-+
-+
-+
-       $scope.add = function () {
-+
-+        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
-+          for(var i = 0; i <  $scope.selectedNodes.length; i++){
-+            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
-+              $scope.tableNames += ($scope.selectedNodes[i].label) += ',';
-+            }
-+          }
-+        }
-+
-         if ($scope.tableNames.trim() === "") {
-           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
-           return;
-diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
-new file mode 100644
-index 0000000..6fca987
---- /dev/null
-+++ b/webapp/app/js/directives/angular-tree-control.js
-@@ -0,0 +1,363 @@
-+/*
-+ * The MIT License (MIT)
-+ *
-+ * Copyright (c) 2013 Steve
-+ *
-+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
-+ * this software and associated documentation files (the "Software"), to deal in
-+ * the Software without restriction, including without limitation the rights to
-+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-+ * the Software, and to permit persons to whom the Software is furnished to do so,
-+ *   subject to the following conditions:
-+ *
-+ *   The above copyright notice and this permission notice shall be included in all
-+ * copies or substantial portions of the Software.
-+ *
-+ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-+ */
-+
-+(function ( angular ) {
-+  'use strict';
-+
-+  angular.module( 'treeControl', [] )
-+    .directive( 'treecontrol', ['$compile', function( $compile ) {
-+      /**
-+       * @param cssClass - the css class
-+       * @param addClassProperty - should we wrap the class name with class=""
-+       */
-+      function classIfDefined(cssClass, addClassProperty) {
-+        if (cssClass) {
-+          if (addClassProperty)
-+            return 'class="' + cssClass + '"';
-+          else
-+            return cssClass;
-+        }
-+        else
-+          return "";
-+      }
-+
-+      function ensureDefault(obj, prop, value) {
-+        if (!obj.hasOwnProperty(prop))
-+          obj[prop] = value;
-+      }
-+
-+      return {
-+        restrict: 'EA',
-+        require: "treecontrol",
-+        transclude: true,
-+        scope: {
-+          treeModel: "=",
-+          selectedNode: "=?",
-+          selectedNodes: "=?",
-+          expandedNodes: "=?",
-+          onSelection: "&",
-+          onNodeToggle: "&",
-+          options: "=?",
-+          orderBy: "@",
-+          reverseOrder: "@",
-+          filterExpression: "=?",
-+          filterComparator: "=?",
-+          onDblclick: "&"
-+        },
-+        controller: ['$scope', function( $scope ) {
-+
-+          function defaultIsLeaf(node) {
-+            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
-+          }
-+
-+          function shallowCopy(src, dst) {
-+            if (angular.isArray(src)) {
-+              dst = dst || [];
-+
-+              for ( var i = 0; i < src.length; i++) {
-+                dst[i] = src[i];
-+              }
-+            } else if (angular.isObject(src)) {
-+              dst = dst || {};
-+
-+              for (var key in src) {
-+                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
-+                  dst[key] = src[key];
-+                }
-+              }
-+            }
-+
-+            return dst || src;
-+          }
-+          function defaultEquality(a, b) {
-+            if (a === undefined || b === undefined)
-+              return false;
-+            a = shallowCopy(a);
-+            a[$scope.options.nodeChildren] = [];
-+            b = shallowCopy(b);
-+            b[$scope.options.nodeChildren] = [];
-+            return angular.equals(a, b);
-+          }
-+
-+          $scope.options = $scope.options || {};
-+          ensureDefault($scope.options, "multiSelection", false);
-+          ensureDefault($scope.options, "nodeChildren", "children");
-+          ensureDefault($scope.options, "dirSelectable", "true");
-+          ensureDefault($scope.options, "injectClasses", {});
-+          ensureDefault($scope.options.injectClasses, "ul", "");
-+          ensureDefault($scope.options.injectClasses, "li", "");
-+          ensureDefault($scope.options.injectClasses, "liSelected", "");
-+          ensureDefault($scope.options.injectClasses, "iExpanded", "");
-+          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
-+          ensureDefault($scope.options.injectClasses, "iLeaf", "");
-+          ensureDefault($scope.options.injectClasses, "label", "");
-+          ensureDefault($scope.options.injectClasses, "labelSelected", "");
-+          ensureDefault($scope.options, "equality", defaultEquality);
-+          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
-+
-+          $scope.selectedNodes = $scope.selectedNodes || [];
-+          $scope.expandedNodes = $scope.expandedNodes || [];
-+          $scope.expandedNodesMap = {};
-+          for (var i=0; i < $scope.expandedNodes.length; i++) {
-+            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
-+          }
-+          $scope.parentScopeOfTree = $scope.$parent;
-+
-+
-+          function isSelectedNode(node) {
-+            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
-+              return true;
-+            else if ($scope.options.multiSelection && $scope.selectedNodes) {
-+              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
-+                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
-+                  return true;
-+                }
-+              }
-+              return false;
-+            }
-+          }
-+
-+          $scope.headClass = function(node) {
-+            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
-+            var injectSelectionClass = "";
-+            if (liSelectionClass && isSelectedNode(node))
-+              injectSelectionClass = " " + liSelectionClass;
-+            if ($scope.options.isLeaf(node))
-+              return "tree-leaf" + injectSelectionClass;
-+            if ($scope.expandedNodesMap[this.$id])
-+              return "tree-expanded" + injectSelectionClass;
-+            else
-+              return "tree-collapsed" + injectSelectionClass;
-+          };
-+
-+          $scope.iBranchClass = function() {
-+            if ($scope.expandedNodesMap[this.$id])
-+              return classIfDefined($scope.options.injectClasses.iExpanded);
-+            else
-+              return classIfDefined($scope.options.injectClasses.iCollapsed);
-+          };
-+
-+          $scope.nodeExpanded = function() {
-+            return !!$scope.expandedNodesMap[this.$id];
-+          };
-+
-+          $scope.selectNodeHead = function() {
-+            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
-+            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
-+            if (expanding) {
-+              $scope.expandedNodes.push(this.node);
-+            }
-+            else {
-+              var index;
-+              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
-+                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
-+                  index = i;
-+                }
-+              }
-+              if (index != undefined)
-+                $scope.expandedNodes.splice(index, 1);
-+            }
-+            if ($scope.onNodeToggle)
-+              $scope.onNodeToggle({node: this.node, expanded: expanding});
-+          };
-+
-+          $scope.selectNodeLabel = function( selectedNode ){
-+            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
-+              this.selectNodeHead();
-+            }
-+            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
-+             {
-+              var selected = false;
-+              if ($scope.options.multiSelection) {
-+                var pos = $scope.selectedNodes.indexOf(selectedNode);
-+                if (pos === -1) {
-+                  $scope.selectedNodes.push(selectedNode);
-+                  selected = true;
-+                } else {
-+                  $scope.selectedNodes.splice(pos, 1);
-+                }
-+              } else {
-+                if ($scope.selectedNode != selectedNode) {
-+                  $scope.selectedNode = selectedNode;
-+                  selected = true;
-+                }
-+                else {
-+                  $scope.selectedNode = undefined;
-+                }
-+              }
-+              if ($scope.onSelection)
-+                $scope.onSelection({node: selectedNode, selected: selected});
-+            }
-+          };
-+
-+
-+          $scope.dblClickNode = function(selectedNode){
-+            if($scope.onDblclick!=null){
-+              $scope.onDblclick({node:selectedNode});
-+            }
-+          }
-+
-+          $scope.selectedClass = function() {
-+            var isThisNodeSelected = isSelectedNode(this.node);
-+            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
-+            var injectSelectionClass = "";
-+            if (labelSelectionClass && isThisNodeSelected)
-+              injectSelectionClass = " " + labelSelectionClass;
-+
-+            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
-+          };
-+
-+          //tree template
-+          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
-+          var template =
-+            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
-+            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
-+            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
-+            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
-+            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
-+            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
-+            '</li>' +
-+            '</ul>';
-+
-+          this.template = $compile(template);
-+        }],
-+        compile: function(element, attrs, childTranscludeFn) {
-+          return function ( scope, element, attrs, treemodelCntr ) {
-+
-+            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
-+              if (angular.isArray(newValue)) {
-+                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
-+                  return;
-+                scope.node = {};
-+                scope.synteticRoot = scope.node;
-+                scope.node[scope.options.nodeChildren] = newValue;
-+              }
-+              else {
-+                if (angular.equals(scope.node, newValue))
-+                  return;
-+                scope.node = newValue;
-+              }
-+            });
-+
-+            scope.$watchCollection('expandedNodes', function(newValue) {
-+              var notFoundIds = 0;
-+              var newExpandedNodesMap = {};
-+              var $liElements = element.find('li');
-+              var existingScopes = [];
-+              // find all nodes visible on the tree and the scope $id of the scopes including them
-+              angular.forEach($liElements, function(liElement) {
-+                var $liElement = angular.element(liElement);
-+                var liScope = $liElement.scope();
-+                existingScopes.push(liScope);
-+              });
-+              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
-+              // if found, add the mapping $id -> node into newExpandedNodesMap
-+              // if not found, add the mapping num -> node into newExpandedNodesMap
-+              angular.forEach(newValue, function(newExNode) {
-+                var found = false;
-+                for (var i=0; (i < existingScopes.length) && !found; i++) {
-+                  var existingScope = existingScopes[i];
-+                  if (scope.options.equality(newExNode, existingScope.node)) {
-+                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
-+                    found = true;
-+                  }
-+                }
-+                if (!found)
-+                  newExpandedNodesMap[notFoundIds++] = newExNode;
-+              });
-+              scope.expandedNodesMap = newExpandedNodesMap;
-+            });
-+
-+//                        scope.$watch('expandedNodesMap', function(newValue) {
-+//
-+//                        });
-+
-+            //Rendering template for a root node
-+            treemodelCntr.template( scope, function(clone) {
-+              element.html('').append( clone );
-+            });
-+            // save the transclude function from compile (which is not bound to a scope as apposed to the one from link)
-+            // we can fix this to work with the link transclude function with angular 1.2.6. as for angular 1.2.0 we need
-+            // to keep using the compile function
-+            scope.$treeTransclude = childTranscludeFn;
-+          }
-+        }
-+      };
-+    }])
-+    .directive("treeitem", function() {
-+      return {
-+        restrict: 'E',
-+        require: "^treecontrol",
-+        link: function( scope, element, attrs, treemodelCntr) {
-+          // Rendering template for the current node
-+          treemodelCntr.template(scope, function(clone) {
-+            element.html('').append(clone);
-+          });
-+        }
-+      }
-+    })
-+    .directive("treeTransclude", function() {
-+      return {
-+        link: function(scope, element, attrs, controller) {
-+          if (!scope.options.isLeaf(scope.node)) {
-+            angular.forEach(scope.expandedNodesMap, function (node, id) {
-+              if (scope.options.equality(node, scope.node)) {
-+                scope.expandedNodesMap[scope.$id] = scope.node;
-+                scope.expandedNodesMap[id] = undefined;
-+              }
-+            });
-+          }
-+          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
-+            scope.selectedNode = scope.node;
-+          } else if (scope.options.multiSelection) {
-+            var newSelectedNodes = [];
-+            for (var i = 0; (i < scope.selectedNodes.length); i++) {
-+              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
-+                newSelectedNodes.push(scope.node);
-+              }
-+            }
-+            scope.selectedNodes = newSelectedNodes;
-+          }
-+
-+          // create a scope for the transclusion, whos parent is the parent of the tree control
-+          scope.transcludeScope = scope.parentScopeOfTree.$new();
-+          scope.transcludeScope.node = scope.node;
-+          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
-+          scope.transcludeScope.$index = scope.$index;
-+          scope.transcludeScope.$first = scope.$first;
-+          scope.transcludeScope.$middle = scope.$middle;
-+          scope.transcludeScope.$last = scope.$last;
-+          scope.transcludeScope.$odd = scope.$odd;
-+          scope.transcludeScope.$even = scope.$even;
-+          scope.$on('$destroy', function() {
-+            scope.transcludeScope.$destroy();
-+          });
-+
-+          scope.$treeTransclude(scope.transcludeScope, function(clone) {
-+            element.empty();
-+            element.append(clone);
-+          });
-+        }
-+      }
-+    });
-+})( angular );
-diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
-index a03403b..b1f04c0 100644
---- a/webapp/app/js/services/kylinProperties.js
-+++ b/webapp/app/js/services/kylinProperties.js
-@@ -20,6 +20,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
-   var _config;
-   var timezone;
-   var deployEnv;
-+  var hiveLimit;
- 
- 
-   this.init = function () {
-@@ -56,12 +57,22 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
-   }
- 
-   this.getDeployEnv = function () {
-+    this.deployEnv = this.getProperty("deploy.env");
-     if (!this.deployEnv) {
--      this.deployEnv = this.getProperty("deploy.env").trim();
-+      return "DEV";
-     }
--    return this.deployEnv.toUpperCase();
-+    return this.deployEnv.toUpperCase().trim();
-   }
- 
-+  this.getHiveLimit = function () {
-+    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
-+    if (!this.hiveLimit) {
-+      return 20;
-+    }
-+    return this.hiveLimit;
-+  }
-+
-+
-   //fill config info for Config from backend
-   this.initWebConfigInfo = function () {
- 
-diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
-index 3b5e9f4..9b2d376 100755
---- a/webapp/app/js/services/tables.js
-+++ b/webapp/app/js/services/tables.js
-@@ -17,13 +17,14 @@
-  */
- 
- KylinApp.factory('TableService', ['$resource', function ($resource, config) {
--  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
-+  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
-     list: {method: 'GET', params: {}, cache: true, isArray: true},
-     get: {method: 'GET', params: {}, isArray: false},
-     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
-     reload: {method: 'PUT', params: {action: 'reload'}, isArray: false},
-     loadHiveTable: {method: 'POST', params: {}, isArray: false},
-     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
--    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
--  });
-+    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
-+    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
-+    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}  });
- }]);
-diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
-index 767eb43..c091dca 100755
---- a/webapp/app/partials/tables/source_table_tree.html
-+++ b/webapp/app/partials/tables/source_table_tree.html
-@@ -26,6 +26,7 @@
-         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
-             <div class="pull-right">
-                 <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
-+                <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
-                 <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
-             </div>
-         </div>
-@@ -47,3 +48,28 @@
- </div>
- 
- <div ng-include="'partials/tables/table_load.html'"></div>
-+
-+<script type="text/ng-template" id="addHiveTableFromTree.html">
-+  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
-+    <h4>Load Hive Table Metadata From Tree</h4>
-+  </div>
-+  <div class="modal-body">
-+    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
-+    <div class="form-group searchBox">
-+      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
-+    </div>
-+    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
-+    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
-+      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
-+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
-+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
-+      {{node.label}}
-+    </treecontrol>
-+  </div>
-+
-+  <div class="modal-footer">
-+    <button class="btn btn-primary" ng-click="add()">Sync</button>
-+    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
-+  </div>
-+
-+</script>
-diff --git a/webapp/bower.json b/webapp/bower.json
-index 41144f9..bba4a52 100755
---- a/webapp/bower.json
-+++ b/webapp/bower.json
-@@ -32,7 +32,8 @@
-     "bootstrap-sweetalert": "~0.4.3",
-     "angular-toggle-switch":"1.3.0",
-     "angular-ui-select": "0.13.2",
--    "angular-sanitize": "1.2.18"
-+    "angular-sanitize": "1.2.18",
-+    "angular-tree-control": "0.2.8"
-   },
-   "devDependencies": {
-     "less.js": "~1.4.0",
-diff --git a/webapp/grunt.json b/webapp/grunt.json
-index 3219b5e..86ad1dc 100755
---- a/webapp/grunt.json
-+++ b/webapp/grunt.json
-@@ -19,7 +19,6 @@
-                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
-                 "app/components/angular-base64/angular-base64.min.js",
-                 "app/components/ng-grid/build/ng-grid.js",
--                "app/components/angular-tree-control/angular-tree-control.js",
-                 "app/components/ace-builds/src-min-noconflict/ace.js",
-                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
-                 "app/components/ace-builds/src-min-noconflict/mode-json.js",
--- 
-2.5.4 (Apple Git-61)
-


[10/43] kylin git commit: KYLIN-1054 Update beeline params in testcases

Posted by sh...@apache.org.
KYLIN-1054 Update beeline params in testcases


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/cf05409c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/cf05409c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/cf05409c

Branch: refs/heads/helix-rebase
Commit: cf05409c75339356fe8af1661a6b6c6790a7192c
Parents: 098a853
Author: lidongsjtu <li...@apache.org>
Authored: Tue Mar 1 19:13:24 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Mar 1 19:13:24 2016 +0800

----------------------------------------------------------------------
 examples/test_case_data/sandbox/hive-site.xml    | 2 +-
 examples/test_case_data/sandbox/kylin.properties | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/cf05409c/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index f4c7738..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -533,7 +533,7 @@
 
     <property>
         <name>hive.server2.enable.doAs</name>
-        <value>false</value>
+        <value>true</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/cf05409c/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index a304cab..0c68a7e 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -21,7 +21,7 @@ kylin.storage.url=hbase
 kylin.hdfs.working.dir=/kylin
 
 # Parameters for beeline client
-kylin.hive.beeline.params=--hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
+kylin.hive.beeline.params=-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'
 
 kylin.job.mapreduce.default.reduce.input.mb=500
 


[11/43] kylin git commit: KYLIN-1074 support load hive table from listed tree.

Posted by sh...@apache.org.
KYLIN-1074 support load hive table from listed tree.


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bc7d4f58
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bc7d4f58
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bc7d4f58

Branch: refs/heads/helix-rebase
Commit: bc7d4f5846d52a17873738047e117e9410d17823
Parents: cf05409
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 15:18:31 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 15:18:55 2016 +0800

----------------------------------------------------------------------
 ...port-load-hive-table-from-listed-tree-.patch | 864 +++++++++++++++++++
 build/conf/kylin.properties                     |   2 +
 .../test_case_data/sandbox/kylin.properties     |   1 +
 pom.xml                                         |   1 +
 .../kylin/rest/controller/TableController.java  |  44 +
 .../apache/kylin/source/hive/HiveClient.java    |   8 +
 webapp/app/index.html                           |   1 +
 webapp/app/js/controllers/sourceMeta.js         | 185 +++-
 .../app/js/directives/angular-tree-control.js   | 363 ++++++++
 webapp/app/js/services/kylinProperties.js       |  12 +-
 webapp/app/js/services/tables.js                |   6 +-
 .../app/partials/tables/source_table_tree.html  |  26 +
 webapp/bower.json                               |   3 +-
 webapp/grunt.json                               |   1 -
 14 files changed, 1509 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
----------------------------------------------------------------------
diff --git a/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
new file mode 100644
index 0000000..31cc017
--- /dev/null
+++ b/0001-KYLIN-1074-support-load-hive-table-from-listed-tree-.patch
@@ -0,0 +1,864 @@
+From 1a79ef1aec557259f9611f5b3199c2e90400be77 Mon Sep 17 00:00:00 2001
+From: Jason <ji...@163.com>
+Date: Wed, 2 Mar 2016 14:40:19 +0800
+Subject: [PATCH] KYLIN-1074 support load hive table from listed tree, patch
+ from @nichunen
+
+---
+ build/conf/kylin.properties                        |   2 +
+ examples/test_case_data/sandbox/kylin.properties   |   1 +
+ pom.xml                                            |   2 +
+ .../kylin/rest/controller/TableController.java     |  44 +++
+ .../org/apache/kylin/source/hive/HiveClient.java   |   8 +
+ webapp/app/index.html                              |   1 +
+ webapp/app/js/controllers/sourceMeta.js            | 186 ++++++++++-
+ webapp/app/js/directives/angular-tree-control.js   | 363 +++++++++++++++++++++
+ webapp/app/js/services/kylinProperties.js          |  15 +-
+ webapp/app/js/services/tables.js                   |   7 +-
+ webapp/app/partials/tables/source_table_tree.html  |  26 ++
+ webapp/bower.json                                  |   3 +-
+ webapp/grunt.json                                  |   1 -
+ 13 files changed, 649 insertions(+), 10 deletions(-)
+ create mode 100644 webapp/app/js/directives/angular-tree-control.js
+
+diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
+index a4b8c3b..e8add7c 100644
+--- a/build/conf/kylin.properties
++++ b/build/conf/kylin.properties
+@@ -158,3 +158,5 @@ deploy.env=DEV
+ 
+ ###########################deprecated configs#######################
+ kylin.sandbox=true
++
++kylin.web.hive.limit=20
+\ No newline at end of file
+diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
+index 9451b78..1a74b80 100644
+--- a/examples/test_case_data/sandbox/kylin.properties
++++ b/examples/test_case_data/sandbox/kylin.properties
+@@ -131,3 +131,4 @@ kylin.web.contact_mail=
+ deploy.env=DEV
+ 
+ 
++kylin.web.hive.limit=20
+\ No newline at end of file
+diff --git a/pom.xml b/pom.xml
+index 9d9a54b..537693f 100644
+--- a/pom.xml
++++ b/pom.xml
+@@ -774,6 +774,8 @@
+                                 <!-- MIT license -->
+                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
+                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
++                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
++
+ 
+                                 <!--configuration file -->
+                                 <exclude>webapp/app/routes.json</exclude>
+diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+index 39af7db..ea5fdd4 100644
+--- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
++++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+@@ -33,6 +33,7 @@ import org.apache.kylin.rest.request.CardinalityRequest;
+ import org.apache.kylin.rest.request.StreamingRequest;
+ import org.apache.kylin.rest.response.TableDescResponse;
+ import org.apache.kylin.rest.service.CubeService;
++import org.apache.kylin.source.hive.HiveClient;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import org.springframework.beans.factory.annotation.Autowired;
+@@ -205,6 +206,49 @@ public class TableController extends BasicController {
+         return descs;
+     }
+ 
++    /**
++     * Show all databases in Hive
++     *
++     * @return Hive databases list
++     * @throws IOException
++     */
++    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
++    @ResponseBody
++    private static List<String> showHiveDatabases() throws IOException {
++        HiveClient hiveClient = new HiveClient();
++        List<String> results = null;
++
++        try {
++            results = hiveClient.getHiveDbNames();
++        } catch (Exception e) {
++            e.printStackTrace();
++            throw new IOException(e);
++        }
++        return results;
++    }
++
++    /**
++     * Show all tables in a Hive database
++     *
++     * @return Hive table list
++     * @throws IOException
++     */
++    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
++    @ResponseBody
++    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
++        HiveClient hiveClient = new HiveClient();
++        List<String> results = null;
++
++        try {
++            results = hiveClient.getHiveTableNames(database);
++        } catch (Exception e) {
++            e.printStackTrace();
++            throw new IOException(e);
++        }
++        return results;
++    }
++
++
+     public void setCubeService(CubeService cubeService) {
+         this.cubeMgmtService = cubeService;
+     }
+diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+index 178889e..a99b304 100644
+--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
++++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+@@ -132,6 +132,14 @@ public class HiveClient {
+         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
+     }
+ 
++    public List<String> getHiveDbNames() throws Exception {
++        return getMetaStoreClient().getAllDatabases();
++    }
++
++    public List<String> getHiveTableNames(String database) throws Exception {
++        return getMetaStoreClient().getAllTables(database);
++    }
++
+     /**
+      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
+      * 
+diff --git a/webapp/app/index.html b/webapp/app/index.html
+index 11ca283..b4eb9d7 100644
+--- a/webapp/app/index.html
++++ b/webapp/app/index.html
+@@ -113,6 +113,7 @@
+ <script src="js/filters/filter.js"></script>
+ <script src="js/directives/directives.js"></script>
+ <script src="js/directives/kylin_abn_tree_directive.js"></script>
++<script src="js/directives/angular-tree-control.js"></script>
+ <script src="js/factories/graph.js"></script>
+ <script src="js/services/cache.js"></script>
+ <script src="js/services/message.js"></script>
+diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
+index abdeeb8..c87d6ef 100755
+--- a/webapp/app/js/controllers/sourceMeta.js
++++ b/webapp/app/js/controllers/sourceMeta.js
+@@ -19,14 +19,14 @@
+ 'use strict';
+ 
+ KylinApp
+-  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig) {
++  .controller('SourceMetaCtrl', function ($scope, $cacheFactory, $q, $window, $routeParams, CubeService, $modal, TableService, $route, loadingRequest, SweetAlert, tableConfig, TableModel,cubeConfig,kylinConfig) {
+     var $httpDefaultCache = $cacheFactory.get('$http');
+     $scope.tableModel = TableModel;
+     $scope.tableModel.selectedSrcDb = [];
+     $scope.tableModel.selectedSrcTable = {};
+     $scope.window = 0.68 * $window.innerHeight;
+     $scope.tableConfig = tableConfig;
+-
++    $scope.kylinConfig = kylinConfig;
+ 
+     $scope.state = {
+       filterAttr: 'id', filterReverse: false, reverseColumn: 'id',
+@@ -100,13 +100,193 @@ KylinApp
+       });
+     };
+ 
+-    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
++    $scope.openTreeModal = function () {
++      $modal.open({
++        templateUrl: 'addHiveTableFromTree.html',
++        controller: ModalInstanceCtrl,
++        resolve: {
++          tableNames: function () {
++            return $scope.tableNames;
++          },
++          projectName:function(){
++            return  $scope.projectModel.selectedProject;
++          },
++          scope: function () {
++            return $scope;
++          }
++        }
++      });
++    };
++
++    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope,kylinConfig) {
+       $scope.tableNames = "";
+       $scope.projectName = projectName;
+       $scope.cancel = function () {
+         $modalInstance.dismiss('cancel');
+       };
++
++      $scope.kylinConfig = kylinConfig;
++
++
++      $scope.treeOptions = {multiSelection: true};
++      $scope.selectedNodes = [];
++      $scope.hiveLimit =  kylinConfig.getHiveLimit();
++
++      $scope.loadHive = function () {
++        if($scope.hiveLoaded)
++          return;
++        TableService.showHiveDatabases({}, function (databases) {
++          $scope.dbNum = databases.length;
++          if (databases.length > 0) {
++            $scope.hiveMap = {};
++            for (var i = 0; i < databases.length; i++) {
++              var dbName = databases[i];
++              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
++              $scope.hive.push(hiveData);
++              $scope.hiveMap[dbName] = i;
++            }
++          }
++          $scope.hiveLoaded = true;
++          $scope.showMoreDatabases();
++        });
++      }
++
++      $scope.showMoreTables = function(hiveTables, node){
++        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = 0;
++        var hasMore = false;
++        if(from + $scope.hiveLimit > hiveTables.length) {
++          to = hiveTables.length - 1;
++        } else {
++          to = from + $scope.hiveLimit - 1;
++          hasMore = true;
++        }
++        if(!angular.isUndefined(node.children[from])){
++          node.children.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
++        }
++
++        if(hasMore){
++          var loading = {"label":"","id":65535,"children":[]};
++          node.children.push(loading);
++        }
++      }
++
++      $scope.showAllTables = function(hiveTables, node){
++        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = hiveTables.length - 1;
++        if(!angular.isUndefined(node.children[from])){
++          node.children.pop();
++        }
++        for(var idx = from; idx <= to; idx++){
++          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
++        }
++      }
++
++      $scope.showMoreDatabases = function(){
++        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = 0;
++        var hasMore = false;
++        if(from + $scope.hiveLimit > $scope.hive.length) {
++          to = $scope.hive.length - 1;
++        } else {
++          to = from + $scope.hiveLimit - 1;
++          hasMore = true;
++        }
++        if(!angular.isUndefined($scope.treedata[from])){
++          $scope.treedata.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          var children = [];
++          var loading = {"label":"","id":0,"children":[]};
++          children.push(loading);
++          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
++        }
++
++        if(hasMore){
++          var loading = {"label":"","id":65535,"children":[0]};
++          $scope.treedata.push(loading);
++        }
++      }
++
++      $scope.showAllDatabases = function(){
++        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
++        var from = $scope.hiveLimit * shownTimes;
++        var to = $scope.hive.length - 1;
++
++        if(!angular.isUndefined($scope.treedata[from])){
++          $scope.treedata.pop();
++        }
++
++        for(var idx = from; idx <= to; idx++){
++          var children = [];
++          var loading = {"label":"","id":0,"children":[]};
++          children.push(loading);
++          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
++        }
++      }
++
++      $scope.showMoreClicked = function($parentNode){
++        if($parentNode == null){
++          $scope.showMoreDatabases();
++        } else {
++          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
++        }
++      }
++
++      $scope.showAllClicked = function($parentNode){
++        if($parentNode == null){
++          $scope.showAllDatabases();
++        } else {
++          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
++        }
++      }
++
++      $scope.showToggle = function(node) {
++        if(node.expanded == false){
++          TableService.showHiveTables({"database": node.label},function (hive_tables){
++            var tables = [];
++            for (var i = 0; i < hive_tables.length; i++) {
++              tables.push(hive_tables[i]);
++            }
++            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
++            $scope.showMoreTables(tables,node);
++            node.expanded = true;
++          });
++        }
++      }
++
++      $scope.showSelected = function(node) {
++
++      }
++
++      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
++        $scope.hive = [];
++        $scope.hiveLoaded = false;
++        $scope.treedata = [];
++        $scope.loadHive();
++      }
++
++
++
++
+       $scope.add = function () {
++
++        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
++          for(var i = 0; i <  $scope.selectedNodes.length; i++){
++            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
++              $scope.tableNames += ($scope.selectedNodes[i].label) += ',';
++            }
++          }
++        }
++
+         if ($scope.tableNames.trim() === "") {
+           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
+           return;
+diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
+new file mode 100644
+index 0000000..6fca987
+--- /dev/null
++++ b/webapp/app/js/directives/angular-tree-control.js
+@@ -0,0 +1,363 @@
++/*
++ * The MIT License (MIT)
++ *
++ * Copyright (c) 2013 Steve
++ *
++ * Permission is hereby granted, free of charge, to any person obtaining a copy of
++ * this software and associated documentation files (the "Software"), to deal in
++ * the Software without restriction, including without limitation the rights to
++ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
++ * the Software, and to permit persons to whom the Software is furnished to do so,
++ *   subject to the following conditions:
++ *
++ *   The above copyright notice and this permission notice shall be included in all
++ * copies or substantial portions of the Software.
++ *
++ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
++ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
++ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
++ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
++ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
++ */
++
++(function ( angular ) {
++  'use strict';
++
++  angular.module( 'treeControl', [] )
++    .directive( 'treecontrol', ['$compile', function( $compile ) {
++      /**
++       * @param cssClass - the css class
++       * @param addClassProperty - should we wrap the class name with class=""
++       */
++      function classIfDefined(cssClass, addClassProperty) {
++        if (cssClass) {
++          if (addClassProperty)
++            return 'class="' + cssClass + '"';
++          else
++            return cssClass;
++        }
++        else
++          return "";
++      }
++
++      function ensureDefault(obj, prop, value) {
++        if (!obj.hasOwnProperty(prop))
++          obj[prop] = value;
++      }
++
++      return {
++        restrict: 'EA',
++        require: "treecontrol",
++        transclude: true,
++        scope: {
++          treeModel: "=",
++          selectedNode: "=?",
++          selectedNodes: "=?",
++          expandedNodes: "=?",
++          onSelection: "&",
++          onNodeToggle: "&",
++          options: "=?",
++          orderBy: "@",
++          reverseOrder: "@",
++          filterExpression: "=?",
++          filterComparator: "=?",
++          onDblclick: "&"
++        },
++        controller: ['$scope', function( $scope ) {
++
++          function defaultIsLeaf(node) {
++            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
++          }
++
++          function shallowCopy(src, dst) {
++            if (angular.isArray(src)) {
++              dst = dst || [];
++
++              for ( var i = 0; i < src.length; i++) {
++                dst[i] = src[i];
++              }
++            } else if (angular.isObject(src)) {
++              dst = dst || {};
++
++              for (var key in src) {
++                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
++                  dst[key] = src[key];
++                }
++              }
++            }
++
++            return dst || src;
++          }
++          function defaultEquality(a, b) {
++            if (a === undefined || b === undefined)
++              return false;
++            a = shallowCopy(a);
++            a[$scope.options.nodeChildren] = [];
++            b = shallowCopy(b);
++            b[$scope.options.nodeChildren] = [];
++            return angular.equals(a, b);
++          }
++
++          $scope.options = $scope.options || {};
++          ensureDefault($scope.options, "multiSelection", false);
++          ensureDefault($scope.options, "nodeChildren", "children");
++          ensureDefault($scope.options, "dirSelectable", "true");
++          ensureDefault($scope.options, "injectClasses", {});
++          ensureDefault($scope.options.injectClasses, "ul", "");
++          ensureDefault($scope.options.injectClasses, "li", "");
++          ensureDefault($scope.options.injectClasses, "liSelected", "");
++          ensureDefault($scope.options.injectClasses, "iExpanded", "");
++          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
++          ensureDefault($scope.options.injectClasses, "iLeaf", "");
++          ensureDefault($scope.options.injectClasses, "label", "");
++          ensureDefault($scope.options.injectClasses, "labelSelected", "");
++          ensureDefault($scope.options, "equality", defaultEquality);
++          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
++
++          $scope.selectedNodes = $scope.selectedNodes || [];
++          $scope.expandedNodes = $scope.expandedNodes || [];
++          $scope.expandedNodesMap = {};
++          for (var i=0; i < $scope.expandedNodes.length; i++) {
++            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
++          }
++          $scope.parentScopeOfTree = $scope.$parent;
++
++
++          function isSelectedNode(node) {
++            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
++              return true;
++            else if ($scope.options.multiSelection && $scope.selectedNodes) {
++              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
++                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
++                  return true;
++                }
++              }
++              return false;
++            }
++          }
++
++          $scope.headClass = function(node) {
++            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
++            var injectSelectionClass = "";
++            if (liSelectionClass && isSelectedNode(node))
++              injectSelectionClass = " " + liSelectionClass;
++            if ($scope.options.isLeaf(node))
++              return "tree-leaf" + injectSelectionClass;
++            if ($scope.expandedNodesMap[this.$id])
++              return "tree-expanded" + injectSelectionClass;
++            else
++              return "tree-collapsed" + injectSelectionClass;
++          };
++
++          $scope.iBranchClass = function() {
++            if ($scope.expandedNodesMap[this.$id])
++              return classIfDefined($scope.options.injectClasses.iExpanded);
++            else
++              return classIfDefined($scope.options.injectClasses.iCollapsed);
++          };
++
++          $scope.nodeExpanded = function() {
++            return !!$scope.expandedNodesMap[this.$id];
++          };
++
++          $scope.selectNodeHead = function() {
++            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
++            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
++            if (expanding) {
++              $scope.expandedNodes.push(this.node);
++            }
++            else {
++              var index;
++              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
++                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
++                  index = i;
++                }
++              }
++              if (index != undefined)
++                $scope.expandedNodes.splice(index, 1);
++            }
++            if ($scope.onNodeToggle)
++              $scope.onNodeToggle({node: this.node, expanded: expanding});
++          };
++
++          $scope.selectNodeLabel = function( selectedNode ){
++            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
++              this.selectNodeHead();
++            }
++            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
++             {
++              var selected = false;
++              if ($scope.options.multiSelection) {
++                var pos = $scope.selectedNodes.indexOf(selectedNode);
++                if (pos === -1) {
++                  $scope.selectedNodes.push(selectedNode);
++                  selected = true;
++                } else {
++                  $scope.selectedNodes.splice(pos, 1);
++                }
++              } else {
++                if ($scope.selectedNode != selectedNode) {
++                  $scope.selectedNode = selectedNode;
++                  selected = true;
++                }
++                else {
++                  $scope.selectedNode = undefined;
++                }
++              }
++              if ($scope.onSelection)
++                $scope.onSelection({node: selectedNode, selected: selected});
++            }
++          };
++
++
++          $scope.dblClickNode = function(selectedNode){
++            if($scope.onDblclick!=null){
++              $scope.onDblclick({node:selectedNode});
++            }
++          }
++
++          $scope.selectedClass = function() {
++            var isThisNodeSelected = isSelectedNode(this.node);
++            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
++            var injectSelectionClass = "";
++            if (labelSelectionClass && isThisNodeSelected)
++              injectSelectionClass = " " + labelSelectionClass;
++
++            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
++          };
++
++          //tree template
++          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
++          var template =
++            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
++            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
++            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
++            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
++            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
++            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
++            '</li>' +
++            '</ul>';
++
++          this.template = $compile(template);
++        }],
++        compile: function(element, attrs, childTranscludeFn) {
++          return function ( scope, element, attrs, treemodelCntr ) {
++
++            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
++              if (angular.isArray(newValue)) {
++                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
++                  return;
++                scope.node = {};
++                scope.synteticRoot = scope.node;
++                scope.node[scope.options.nodeChildren] = newValue;
++              }
++              else {
++                if (angular.equals(scope.node, newValue))
++                  return;
++                scope.node = newValue;
++              }
++            });
++
++            scope.$watchCollection('expandedNodes', function(newValue) {
++              var notFoundIds = 0;
++              var newExpandedNodesMap = {};
++              var $liElements = element.find('li');
++              var existingScopes = [];
++              // find all nodes visible on the tree and the scope $id of the scopes including them
++              angular.forEach($liElements, function(liElement) {
++                var $liElement = angular.element(liElement);
++                var liScope = $liElement.scope();
++                existingScopes.push(liScope);
++              });
++              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
++              // if found, add the mapping $id -> node into newExpandedNodesMap
++              // if not found, add the mapping num -> node into newExpandedNodesMap
++              angular.forEach(newValue, function(newExNode) {
++                var found = false;
++                for (var i=0; (i < existingScopes.length) && !found; i++) {
++                  var existingScope = existingScopes[i];
++                  if (scope.options.equality(newExNode, existingScope.node)) {
++                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
++                    found = true;
++                  }
++                }
++                if (!found)
++                  newExpandedNodesMap[notFoundIds++] = newExNode;
++              });
++              scope.expandedNodesMap = newExpandedNodesMap;
++            });
++
++//                        scope.$watch('expandedNodesMap', function(newValue) {
++//
++//                        });
++
++            //Rendering template for a root node
++            treemodelCntr.template( scope, function(clone) {
++              element.html('').append( clone );
++            });
++            // save the transclude function from compile (which is not bound to a scope as apposed to the one from link)
++            // we can fix this to work with the link transclude function with angular 1.2.6. as for angular 1.2.0 we need
++            // to keep using the compile function
++            scope.$treeTransclude = childTranscludeFn;
++          }
++        }
++      };
++    }])
++    .directive("treeitem", function() {
++      return {
++        restrict: 'E',
++        require: "^treecontrol",
++        link: function( scope, element, attrs, treemodelCntr) {
++          // Rendering template for the current node
++          treemodelCntr.template(scope, function(clone) {
++            element.html('').append(clone);
++          });
++        }
++      }
++    })
++    .directive("treeTransclude", function() {
++      return {
++        link: function(scope, element, attrs, controller) {
++          if (!scope.options.isLeaf(scope.node)) {
++            angular.forEach(scope.expandedNodesMap, function (node, id) {
++              if (scope.options.equality(node, scope.node)) {
++                scope.expandedNodesMap[scope.$id] = scope.node;
++                scope.expandedNodesMap[id] = undefined;
++              }
++            });
++          }
++          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
++            scope.selectedNode = scope.node;
++          } else if (scope.options.multiSelection) {
++            var newSelectedNodes = [];
++            for (var i = 0; (i < scope.selectedNodes.length); i++) {
++              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
++                newSelectedNodes.push(scope.node);
++              }
++            }
++            scope.selectedNodes = newSelectedNodes;
++          }
++
++          // create a scope for the transclusion, whos parent is the parent of the tree control
++          scope.transcludeScope = scope.parentScopeOfTree.$new();
++          scope.transcludeScope.node = scope.node;
++          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
++          scope.transcludeScope.$index = scope.$index;
++          scope.transcludeScope.$first = scope.$first;
++          scope.transcludeScope.$middle = scope.$middle;
++          scope.transcludeScope.$last = scope.$last;
++          scope.transcludeScope.$odd = scope.$odd;
++          scope.transcludeScope.$even = scope.$even;
++          scope.$on('$destroy', function() {
++            scope.transcludeScope.$destroy();
++          });
++
++          scope.$treeTransclude(scope.transcludeScope, function(clone) {
++            element.empty();
++            element.append(clone);
++          });
++        }
++      }
++    });
++})( angular );
+diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
+index a03403b..b1f04c0 100644
+--- a/webapp/app/js/services/kylinProperties.js
++++ b/webapp/app/js/services/kylinProperties.js
+@@ -20,6 +20,7 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
+   var _config;
+   var timezone;
+   var deployEnv;
++  var hiveLimit;
+ 
+ 
+   this.init = function () {
+@@ -56,12 +57,22 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
+   }
+ 
+   this.getDeployEnv = function () {
++    this.deployEnv = this.getProperty("deploy.env");
+     if (!this.deployEnv) {
+-      this.deployEnv = this.getProperty("deploy.env").trim();
++      return "DEV";
+     }
+-    return this.deployEnv.toUpperCase();
++    return this.deployEnv.toUpperCase().trim();
+   }
+ 
++  this.getHiveLimit = function () {
++    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
++    if (!this.hiveLimit) {
++      return 20;
++    }
++    return this.hiveLimit;
++  }
++
++
+   //fill config info for Config from backend
+   this.initWebConfigInfo = function () {
+ 
+diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
+index 3b5e9f4..9b2d376 100755
+--- a/webapp/app/js/services/tables.js
++++ b/webapp/app/js/services/tables.js
+@@ -17,13 +17,14 @@
+  */
+ 
+ KylinApp.factory('TableService', ['$resource', function ($resource, config) {
+-  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
++  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
+     list: {method: 'GET', params: {}, cache: true, isArray: true},
+     get: {method: 'GET', params: {}, isArray: false},
+     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
+     reload: {method: 'PUT', params: {action: 'reload'}, isArray: false},
+     loadHiveTable: {method: 'POST', params: {}, isArray: false},
+     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
+-    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
+-  });
++    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
++    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
++    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}  });
+ }]);
+diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
+index 767eb43..c091dca 100755
+--- a/webapp/app/partials/tables/source_table_tree.html
++++ b/webapp/app/partials/tables/source_table_tree.html
+@@ -26,6 +26,7 @@
+         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
+             <div class="pull-right">
+                 <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
++                <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
+                 <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
+             </div>
+         </div>
+@@ -47,3 +48,28 @@
+ </div>
+ 
+ <div ng-include="'partials/tables/table_load.html'"></div>
++
++<script type="text/ng-template" id="addHiveTableFromTree.html">
++  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
++    <h4>Load Hive Table Metadata From Tree</h4>
++  </div>
++  <div class="modal-body">
++    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
++    <div class="form-group searchBox">
++      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
++    </div>
++    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
++    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
++      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
++      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
++      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
++      {{node.label}}
++    </treecontrol>
++  </div>
++
++  <div class="modal-footer">
++    <button class="btn btn-primary" ng-click="add()">Sync</button>
++    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
++  </div>
++
++</script>
+diff --git a/webapp/bower.json b/webapp/bower.json
+index 41144f9..bba4a52 100755
+--- a/webapp/bower.json
++++ b/webapp/bower.json
+@@ -32,7 +32,8 @@
+     "bootstrap-sweetalert": "~0.4.3",
+     "angular-toggle-switch":"1.3.0",
+     "angular-ui-select": "0.13.2",
+-    "angular-sanitize": "1.2.18"
++    "angular-sanitize": "1.2.18",
++    "angular-tree-control": "0.2.8"
+   },
+   "devDependencies": {
+     "less.js": "~1.4.0",
+diff --git a/webapp/grunt.json b/webapp/grunt.json
+index 3219b5e..86ad1dc 100755
+--- a/webapp/grunt.json
++++ b/webapp/grunt.json
+@@ -19,7 +19,6 @@
+                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
+                 "app/components/angular-base64/angular-base64.min.js",
+                 "app/components/ng-grid/build/ng-grid.js",
+-                "app/components/angular-tree-control/angular-tree-control.js",
+                 "app/components/ace-builds/src-min-noconflict/ace.js",
+                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
+                 "app/components/ace-builds/src-min-noconflict/mode-json.js",
+-- 
+2.5.4 (Apple Git-61)
+

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 5532339..78a564d 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -148,3 +148,5 @@ deploy.env=DEV
 
 ###########################deprecated configs#######################
 kylin.sandbox=true
+
+ kylin.web.hive.limit=20
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 0c68a7e..7c9919b 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -116,4 +116,5 @@ kylin.web.contact_mail=
 #env DEV|QA|PROD
 deploy.env=DEV
 
+ kylin.web.hive.limit=20
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 42a0c6d..2e42841 100644
--- a/pom.xml
+++ b/pom.xml
@@ -781,6 +781,7 @@
                                 <!-- MIT license -->
                                 <exclude>webapp/app/css/AdminLTE.css</exclude>
                                 <exclude>webapp/app/js/directives/kylin_abn_tree_directive.js</exclude>
+                                <exclude>webapp/app/js/directives/angular-tree-control.js</exclude>
 
                                 <!--configuration file -->
                                 <exclude>webapp/app/routes.json</exclude>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
index 98e8d58..bd04ad8 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -36,6 +36,7 @@ import org.apache.kylin.rest.response.TableDescResponse;
 import org.apache.kylin.rest.service.CubeService;
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.ProjectService;
+import org.apache.kylin.source.hive.HiveClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -258,6 +259,49 @@ public class TableController extends BasicController {
         return descs;
     }
 
+
+    /**
+     * Show all databases in Hive
+     *
+     * @return Hive databases list
+     * @throws IOException
+     */
+    @RequestMapping(value = "/hive", method = { RequestMethod.GET })
+    @ResponseBody
+    private static List<String> showHiveDatabases() throws IOException {
+        HiveClient hiveClient = new HiveClient();
+        List<String> results = null;
+
+        try {
+            results = hiveClient.getHiveDbNames();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IOException(e);
+        }
+        return results;
+    }
+
+    /**
+     * Show all tables in a Hive database
+     *
+     * @return Hive table list
+     * @throws IOException
+     */
+    @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
+    @ResponseBody
+    private static List<String> showHiveTables(@PathVariable String database) throws IOException {
+        HiveClient hiveClient = new HiveClient();
+        List<String> results = null;
+
+        try {
+            results = hiveClient.getHiveTableNames(database);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new IOException(e);
+        }
+        return results;
+    }
+
     public void setCubeService(CubeService cubeService) {
         this.cubeMgmtService = cubeService;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
index 178889e..a99b304 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
@@ -132,6 +132,14 @@ public class HiveClient {
         return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
     }
 
+    public List<String> getHiveDbNames() throws Exception {
+        return getMetaStoreClient().getAllDatabases();
+    }
+
+    public List<String> getHiveTableNames(String database) throws Exception {
+        return getMetaStoreClient().getAllTables(database);
+    }
+
     /**
      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
      * 

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/index.html
----------------------------------------------------------------------
diff --git a/webapp/app/index.html b/webapp/app/index.html
index 11ca283..b4eb9d7 100644
--- a/webapp/app/index.html
+++ b/webapp/app/index.html
@@ -113,6 +113,7 @@
 <script src="js/filters/filter.js"></script>
 <script src="js/directives/directives.js"></script>
 <script src="js/directives/kylin_abn_tree_directive.js"></script>
+<script src="js/directives/angular-tree-control.js"></script>
 <script src="js/factories/graph.js"></script>
 <script src="js/services/cache.js"></script>
 <script src="js/services/message.js"></script>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/controllers/sourceMeta.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/sourceMeta.js b/webapp/app/js/controllers/sourceMeta.js
index cbd9f52..69f1a44 100755
--- a/webapp/app/js/controllers/sourceMeta.js
+++ b/webapp/app/js/controllers/sourceMeta.js
@@ -100,6 +100,24 @@ KylinApp
       });
     };
 
+    $scope.openTreeModal = function () {
+      $modal.open({
+        templateUrl: 'addHiveTableFromTree.html',
+        controller: ModalInstanceCtrl,
+        resolve: {
+          tableNames: function () {
+            return $scope.tableNames;
+          },
+          projectName:function(){
+            return  $scope.projectModel.selectedProject;
+          },
+          scope: function () {
+            return $scope;
+          }
+        }
+      });
+    };
+
     $scope.openUnLoadModal = function () {
       $modal.open({
         templateUrl: 'removeHiveTable.html',
@@ -119,13 +137,175 @@ KylinApp
       });
     };
 
-    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope) {
+    var ModalInstanceCtrl = function ($scope, $location, $modalInstance, tableNames, MessageService, projectName, scope,kylinConfig) {
       $scope.tableNames = "";
       $scope.projectName = projectName;
       $scope.cancel = function () {
         $modalInstance.dismiss('cancel');
       };
+
+      $scope.kylinConfig = kylinConfig;
+
+
+      $scope.treeOptions = {multiSelection: true};
+      $scope.selectedNodes = [];
+      $scope.hiveLimit =  kylinConfig.getHiveLimit();
+
+      $scope.loadHive = function () {
+        if($scope.hiveLoaded)
+          return;
+        TableService.showHiveDatabases({}, function (databases) {
+          $scope.dbNum = databases.length;
+          if (databases.length > 0) {
+            $scope.hiveMap = {};
+            for (var i = 0; i < databases.length; i++) {
+              var dbName = databases[i];
+              var hiveData = {"dbname":dbName,"tables":[],"expanded":false};
+              $scope.hive.push(hiveData);
+              $scope.hiveMap[dbName] = i;
+            }
+          }
+          $scope.hiveLoaded = true;
+          $scope.showMoreDatabases();
+        });
+      }
+
+      $scope.showMoreTables = function(hiveTables, node){
+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = 0;
+        var hasMore = false;
+        if(from + $scope.hiveLimit > hiveTables.length) {
+          to = hiveTables.length - 1;
+        } else {
+          to = from + $scope.hiveLimit - 1;
+          hasMore = true;
+        }
+        if(!angular.isUndefined(node.children[from])){
+          node.children.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
+        }
+
+        if(hasMore){
+          var loading = {"label":"","id":65535,"children":[]};
+          node.children.push(loading);
+        }
+      }
+
+      $scope.showAllTables = function(hiveTables, node){
+        var shownTimes = parseInt(node.children.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = hiveTables.length - 1;
+        if(!angular.isUndefined(node.children[from])){
+          node.children.pop();
+        }
+        for(var idx = from; idx <= to; idx++){
+          node.children.push({"label":node.label+'.'+hiveTables[idx],"id":idx-from+1,"children":[]});
+        }
+      }
+
+      $scope.showMoreDatabases = function(){
+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = 0;
+        var hasMore = false;
+        if(from + $scope.hiveLimit > $scope.hive.length) {
+          to = $scope.hive.length - 1;
+        } else {
+          to = from + $scope.hiveLimit - 1;
+          hasMore = true;
+        }
+        if(!angular.isUndefined($scope.treedata[from])){
+          $scope.treedata.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          var children = [];
+          var loading = {"label":"","id":0,"children":[]};
+          children.push(loading);
+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
+        }
+
+        if(hasMore){
+          var loading = {"label":"","id":65535,"children":[0]};
+          $scope.treedata.push(loading);
+        }
+      }
+
+      $scope.showAllDatabases = function(){
+        var shownTimes = parseInt($scope.treedata.length / $scope.hiveLimit);
+        var from = $scope.hiveLimit * shownTimes;
+        var to = $scope.hive.length - 1;
+
+        if(!angular.isUndefined($scope.treedata[from])){
+          $scope.treedata.pop();
+        }
+
+        for(var idx = from; idx <= to; idx++){
+          var children = [];
+          var loading = {"label":"","id":0,"children":[]};
+          children.push(loading);
+          $scope.treedata.push({"label":$scope.hive[idx].dbname,"id":idx+1,"children":children,"expanded":false});
+        }
+      }
+
+      $scope.showMoreClicked = function($parentNode){
+        if($parentNode == null){
+          $scope.showMoreDatabases();
+        } else {
+          $scope.showMoreTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
+        }
+      }
+
+      $scope.showAllClicked = function($parentNode){
+        if($parentNode == null){
+          $scope.showAllDatabases();
+        } else {
+          $scope.showAllTables($scope.hive[$scope.hiveMap[$parentNode.label]].tables,$parentNode);
+        }
+      }
+
+      $scope.showToggle = function(node) {
+        if(node.expanded == false){
+          TableService.showHiveTables({"database": node.label},function (hive_tables){
+            var tables = [];
+            for (var i = 0; i < hive_tables.length; i++) {
+              tables.push(hive_tables[i]);
+            }
+            $scope.hive[$scope.hiveMap[node.label]].tables = tables;
+            $scope.showMoreTables(tables,node);
+            node.expanded = true;
+          });
+        }
+      }
+
+      $scope.showSelected = function(node) {
+
+      }
+
+      if(angular.isUndefined($scope.hive) || angular.isUndefined($scope.hiveLoaded) || angular.isUndefined($scope.treedata) ){
+        $scope.hive = [];
+        $scope.hiveLoaded = false;
+        $scope.treedata = [];
+        $scope.loadHive();
+      }
+
+
+
+
       $scope.add = function () {
+
+        if($scope.tableNames.length === 0 && $scope.selectedNodes.length > 0) {
+          for(var i = 0; i <  $scope.selectedNodes.length; i++){
+            if($scope.selectedNodes[i].label.indexOf(".") >= 0){
+              $scope.tableNames += ($scope.selectedNodes[i].label) += ',';
+            }
+          }
+        }
+
         if ($scope.tableNames.trim() === "") {
           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
           return;
@@ -172,7 +352,8 @@ KylinApp
         })
       }
 
-      $scope.remove = function () {
+
+    $scope.remove = function () {
         if ($scope.tableNames.trim() === "") {
           SweetAlert.swal('', 'Please input table(s) you want to synchronize.', 'info');
           return;

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/directives/angular-tree-control.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/directives/angular-tree-control.js b/webapp/app/js/directives/angular-tree-control.js
new file mode 100644
index 0000000..6fca987
--- /dev/null
+++ b/webapp/app/js/directives/angular-tree-control.js
@@ -0,0 +1,363 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2013 Steve
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ * the Software, and to permit persons to whom the Software is furnished to do so,
+ *   subject to the following conditions:
+ *
+ *   The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ *   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+ * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+ * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+(function ( angular ) {
+  'use strict';
+
+  angular.module( 'treeControl', [] )
+    .directive( 'treecontrol', ['$compile', function( $compile ) {
+      /**
+       * @param cssClass - the css class
+       * @param addClassProperty - should we wrap the class name with class=""
+       */
+      function classIfDefined(cssClass, addClassProperty) {
+        if (cssClass) {
+          if (addClassProperty)
+            return 'class="' + cssClass + '"';
+          else
+            return cssClass;
+        }
+        else
+          return "";
+      }
+
+      function ensureDefault(obj, prop, value) {
+        if (!obj.hasOwnProperty(prop))
+          obj[prop] = value;
+      }
+
+      return {
+        restrict: 'EA',
+        require: "treecontrol",
+        transclude: true,
+        scope: {
+          treeModel: "=",
+          selectedNode: "=?",
+          selectedNodes: "=?",
+          expandedNodes: "=?",
+          onSelection: "&",
+          onNodeToggle: "&",
+          options: "=?",
+          orderBy: "@",
+          reverseOrder: "@",
+          filterExpression: "=?",
+          filterComparator: "=?",
+          onDblclick: "&"
+        },
+        controller: ['$scope', function( $scope ) {
+
+          function defaultIsLeaf(node) {
+            return !node[$scope.options.nodeChildren] || node[$scope.options.nodeChildren].length === 0;
+          }
+
+          function shallowCopy(src, dst) {
+            if (angular.isArray(src)) {
+              dst = dst || [];
+
+              for ( var i = 0; i < src.length; i++) {
+                dst[i] = src[i];
+              }
+            } else if (angular.isObject(src)) {
+              dst = dst || {};
+
+              for (var key in src) {
+                if (hasOwnProperty.call(src, key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
+                  dst[key] = src[key];
+                }
+              }
+            }
+
+            return dst || src;
+          }
+          function defaultEquality(a, b) {
+            if (a === undefined || b === undefined)
+              return false;
+            a = shallowCopy(a);
+            a[$scope.options.nodeChildren] = [];
+            b = shallowCopy(b);
+            b[$scope.options.nodeChildren] = [];
+            return angular.equals(a, b);
+          }
+
+          $scope.options = $scope.options || {};
+          ensureDefault($scope.options, "multiSelection", false);
+          ensureDefault($scope.options, "nodeChildren", "children");
+          ensureDefault($scope.options, "dirSelectable", "true");
+          ensureDefault($scope.options, "injectClasses", {});
+          ensureDefault($scope.options.injectClasses, "ul", "");
+          ensureDefault($scope.options.injectClasses, "li", "");
+          ensureDefault($scope.options.injectClasses, "liSelected", "");
+          ensureDefault($scope.options.injectClasses, "iExpanded", "");
+          ensureDefault($scope.options.injectClasses, "iCollapsed", "");
+          ensureDefault($scope.options.injectClasses, "iLeaf", "");
+          ensureDefault($scope.options.injectClasses, "label", "");
+          ensureDefault($scope.options.injectClasses, "labelSelected", "");
+          ensureDefault($scope.options, "equality", defaultEquality);
+          ensureDefault($scope.options, "isLeaf", defaultIsLeaf);
+
+          $scope.selectedNodes = $scope.selectedNodes || [];
+          $scope.expandedNodes = $scope.expandedNodes || [];
+          $scope.expandedNodesMap = {};
+          for (var i=0; i < $scope.expandedNodes.length; i++) {
+            $scope.expandedNodesMap[""+i] = $scope.expandedNodes[i];
+          }
+          $scope.parentScopeOfTree = $scope.$parent;
+
+
+          function isSelectedNode(node) {
+            if (!$scope.options.multiSelection && ($scope.options.equality(node, $scope.selectedNode)))
+              return true;
+            else if ($scope.options.multiSelection && $scope.selectedNodes) {
+              for (var i = 0; (i < $scope.selectedNodes.length); i++) {
+                if ($scope.options.equality(node, $scope.selectedNodes[i])) {
+                  return true;
+                }
+              }
+              return false;
+            }
+          }
+
+          $scope.headClass = function(node) {
+            var liSelectionClass = classIfDefined($scope.options.injectClasses.liSelected, false);
+            var injectSelectionClass = "";
+            if (liSelectionClass && isSelectedNode(node))
+              injectSelectionClass = " " + liSelectionClass;
+            if ($scope.options.isLeaf(node))
+              return "tree-leaf" + injectSelectionClass;
+            if ($scope.expandedNodesMap[this.$id])
+              return "tree-expanded" + injectSelectionClass;
+            else
+              return "tree-collapsed" + injectSelectionClass;
+          };
+
+          $scope.iBranchClass = function() {
+            if ($scope.expandedNodesMap[this.$id])
+              return classIfDefined($scope.options.injectClasses.iExpanded);
+            else
+              return classIfDefined($scope.options.injectClasses.iCollapsed);
+          };
+
+          $scope.nodeExpanded = function() {
+            return !!$scope.expandedNodesMap[this.$id];
+          };
+
+          $scope.selectNodeHead = function() {
+            var expanding = $scope.expandedNodesMap[this.$id] === undefined;
+            $scope.expandedNodesMap[this.$id] = (expanding ? this.node : undefined);
+            if (expanding) {
+              $scope.expandedNodes.push(this.node);
+            }
+            else {
+              var index;
+              for (var i=0; (i < $scope.expandedNodes.length) && !index; i++) {
+                if ($scope.options.equality($scope.expandedNodes[i], this.node)) {
+                  index = i;
+                }
+              }
+              if (index != undefined)
+                $scope.expandedNodes.splice(index, 1);
+            }
+            if ($scope.onNodeToggle)
+              $scope.onNodeToggle({node: this.node, expanded: expanding});
+          };
+
+          $scope.selectNodeLabel = function( selectedNode ){
+            if(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0){
+              this.selectNodeHead();
+            }
+            if($scope.options.dirSelectable || !(selectedNode[$scope.options.nodeChildren] && selectedNode[$scope.options.nodeChildren].length > 0) )
+             {
+              var selected = false;
+              if ($scope.options.multiSelection) {
+                var pos = $scope.selectedNodes.indexOf(selectedNode);
+                if (pos === -1) {
+                  $scope.selectedNodes.push(selectedNode);
+                  selected = true;
+                } else {
+                  $scope.selectedNodes.splice(pos, 1);
+                }
+              } else {
+                if ($scope.selectedNode != selectedNode) {
+                  $scope.selectedNode = selectedNode;
+                  selected = true;
+                }
+                else {
+                  $scope.selectedNode = undefined;
+                }
+              }
+              if ($scope.onSelection)
+                $scope.onSelection({node: selectedNode, selected: selected});
+            }
+          };
+
+
+          $scope.dblClickNode = function(selectedNode){
+            if($scope.onDblclick!=null){
+              $scope.onDblclick({node:selectedNode});
+            }
+          }
+
+          $scope.selectedClass = function() {
+            var isThisNodeSelected = isSelectedNode(this.node);
+            var labelSelectionClass = classIfDefined($scope.options.injectClasses.labelSelected, false);
+            var injectSelectionClass = "";
+            if (labelSelectionClass && isThisNodeSelected)
+              injectSelectionClass = " " + labelSelectionClass;
+
+            return isThisNodeSelected?"tree-selected" + injectSelectionClass:"";
+          };
+
+          //tree template
+          var orderBy = $scope.orderBy ? ' | orderBy:orderBy:reverseOrder' : '';
+          var template =
+            '<ul '+classIfDefined($scope.options.injectClasses.ul, true)+'>' +
+            '<li ng-repeat="node in node.' + $scope.options.nodeChildren + ' | filter:filterExpression:filterComparator ' + orderBy + '" ng-class="headClass(node)" '+classIfDefined($scope.options.injectClasses.li, true)+'>' +
+            '<i class="tree-branch-head" ng-class="iBranchClass()" ng-click="selectNodeHead(node)"></i>' +
+            '<i class="tree-leaf-head '+classIfDefined($scope.options.injectClasses.iLeaf, false)+'"></i>' +
+            '<div class="tree-label '+classIfDefined($scope.options.injectClasses.label, false)+'" ng-class="selectedClass()" ng-click="selectNodeLabel(node)" ng-dblclick="dblClickNode(node)" tree-transclude></div>' +
+            '<treeitem ng-if="nodeExpanded()"></treeitem>' +
+            '</li>' +
+            '</ul>';
+
+          this.template = $compile(template);
+        }],
+        compile: function(element, attrs, childTranscludeFn) {
+          return function ( scope, element, attrs, treemodelCntr ) {
+
+            scope.$watch("treeModel", function updateNodeOnRootScope(newValue) {
+              if (angular.isArray(newValue)) {
+                if (angular.isDefined(scope.node) && angular.equals(scope.node[scope.options.nodeChildren], newValue))
+                  return;
+                scope.node = {};
+                scope.synteticRoot = scope.node;
+                scope.node[scope.options.nodeChildren] = newValue;
+              }
+              else {
+                if (angular.equals(scope.node, newValue))
+                  return;
+                scope.node = newValue;
+              }
+            });
+
+            scope.$watchCollection('expandedNodes', function(newValue) {
+              var notFoundIds = 0;
+              var newExpandedNodesMap = {};
+              var $liElements = element.find('li');
+              var existingScopes = [];
+              // find all nodes visible on the tree and the scope $id of the scopes including them
+              angular.forEach($liElements, function(liElement) {
+                var $liElement = angular.element(liElement);
+                var liScope = $liElement.scope();
+                existingScopes.push(liScope);
+              });
+              // iterate over the newValue, the new expanded nodes, and for each find it in the existingNodesAndScopes
+              // if found, add the mapping $id -> node into newExpandedNodesMap
+              // if not found, add the mapping num -> node into newExpandedNodesMap
+              angular.forEach(newValue, function(newExNode) {
+                var found = false;
+                for (var i=0; (i < existingScopes.length) && !found; i++) {
+                  var existingScope = existingScopes[i];
+                  if (scope.options.equality(newExNode, existingScope.node)) {
+                    newExpandedNodesMap[existingScope.$id] = existingScope.node;
+                    found = true;
+                  }
+                }
+                if (!found)
+                  newExpandedNodesMap[notFoundIds++] = newExNode;
+              });
+              scope.expandedNodesMap = newExpandedNodesMap;
+            });
+
+//                        scope.$watch('expandedNodesMap', function(newValue) {
+//
+//                        });
+
+            //Rendering template for a root node
+            treemodelCntr.template( scope, function(clone) {
+              element.html('').append( clone );
+            });
+            // save the transclude function from compile (which is not bound to a scope as apposed to the one from link)
+            // we can fix this to work with the link transclude function with angular 1.2.6. as for angular 1.2.0 we need
+            // to keep using the compile function
+            scope.$treeTransclude = childTranscludeFn;
+          }
+        }
+      };
+    }])
+    .directive("treeitem", function() {
+      return {
+        restrict: 'E',
+        require: "^treecontrol",
+        link: function( scope, element, attrs, treemodelCntr) {
+          // Rendering template for the current node
+          treemodelCntr.template(scope, function(clone) {
+            element.html('').append(clone);
+          });
+        }
+      }
+    })
+    .directive("treeTransclude", function() {
+      return {
+        link: function(scope, element, attrs, controller) {
+          if (!scope.options.isLeaf(scope.node)) {
+            angular.forEach(scope.expandedNodesMap, function (node, id) {
+              if (scope.options.equality(node, scope.node)) {
+                scope.expandedNodesMap[scope.$id] = scope.node;
+                scope.expandedNodesMap[id] = undefined;
+              }
+            });
+          }
+          if (!scope.options.multiSelection && scope.options.equality(scope.node, scope.selectedNode)) {
+            scope.selectedNode = scope.node;
+          } else if (scope.options.multiSelection) {
+            var newSelectedNodes = [];
+            for (var i = 0; (i < scope.selectedNodes.length); i++) {
+              if (scope.options.equality(scope.node, scope.selectedNodes[i])) {
+                newSelectedNodes.push(scope.node);
+              }
+            }
+            scope.selectedNodes = newSelectedNodes;
+          }
+
+          // create a scope for the transclusion, whos parent is the parent of the tree control
+          scope.transcludeScope = scope.parentScopeOfTree.$new();
+          scope.transcludeScope.node = scope.node;
+          scope.transcludeScope.$parentNode = (scope.$parent.node === scope.synteticRoot)?null:scope.$parent.node;
+          scope.transcludeScope.$index = scope.$index;
+          scope.transcludeScope.$first = scope.$first;
+          scope.transcludeScope.$middle = scope.$middle;
+          scope.transcludeScope.$last = scope.$last;
+          scope.transcludeScope.$odd = scope.$odd;
+          scope.transcludeScope.$even = scope.$even;
+          scope.$on('$destroy', function() {
+            scope.transcludeScope.$destroy();
+          });
+
+          scope.$treeTransclude(scope.transcludeScope, function(clone) {
+            element.empty();
+            element.append(clone);
+          });
+        }
+      }
+    });
+})( angular );

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/services/kylinProperties.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
index a03403b..546db2b 100644
--- a/webapp/app/js/services/kylinProperties.js
+++ b/webapp/app/js/services/kylinProperties.js
@@ -56,12 +56,20 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
   }
 
   this.getDeployEnv = function () {
+    this.deployEnv = this.getProperty("deploy.env");
     if (!this.deployEnv) {
-      this.deployEnv = this.getProperty("deploy.env").trim();
+      return "DEV";
     }
-    return this.deployEnv.toUpperCase();
+    return this.deployEnv.toUpperCase().trim();
   }
 
+  this.getHiveLimit = function () {
+    this.hiveLimit = this.getProperty("kylin.web.hive.limit");
+    if (!this.hiveLimit) {
+      return 20;
+    }
+    return this.hiveLimit;
+  }
   //fill config info for Config from backend
   this.initWebConfigInfo = function () {
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/js/services/tables.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/tables.js b/webapp/app/js/services/tables.js
index ca7fc42..4199d6c 100755
--- a/webapp/app/js/services/tables.js
+++ b/webapp/app/js/services/tables.js
@@ -17,7 +17,7 @@
  */
 
 KylinApp.factory('TableService', ['$resource', function ($resource, config) {
-  return $resource(Config.service.url + 'tables/:tableName/:action', {}, {
+  return $resource(Config.service.url + 'tables/:tableName/:action/:database', {}, {
     list: {method: 'GET', params: {}, cache: true, isArray: true},
     get: {method: 'GET', params: {}, isArray: false},
     getExd: {method: 'GET', params: {action: 'exd-map'}, isArray: false},
@@ -25,6 +25,8 @@ KylinApp.factory('TableService', ['$resource', function ($resource, config) {
     loadHiveTable: {method: 'POST', params: {}, isArray: false},
     unLoadHiveTable: {method: 'DELETE', params: {}, isArray: false},
     addStreamingSrc: {method: 'POST', params: {action:'addStreamingSrc'}, isArray: false},
-    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false}
+    genCardinality: {method: 'PUT', params: {action: 'cardinality'}, isArray: false},
+    showHiveDatabases: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true},
+    showHiveTables: {method: 'GET', params: {action:'hive'}, cache: true, isArray: true}
   });
 }]);

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/app/partials/tables/source_table_tree.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/tables/source_table_tree.html b/webapp/app/partials/tables/source_table_tree.html
index 4eddc4f..c2dc219 100755
--- a/webapp/app/partials/tables/source_table_tree.html
+++ b/webapp/app/partials/tables/source_table_tree.html
@@ -26,6 +26,7 @@
         <div class="col-xs-5" style="padding-left: 0px;margin-top: 20px;">
             <div class="pull-right">
               <a class="btn btn-xs btn-primary" tooltip="Load Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openModal()"><i class="fa fa-download"></i></a>
+              <a class="btn btn-xs btn-info" tooltip="Load Hive Table From Tree"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openTreeModal()"><i class="fa fa-download"></i></a>
               <a class="btn btn-xs btn-info" tooltip="UnLoad Hive Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openUnLoadModal()"><i class="fa fa-remove"></i></a>
               <a class="btn btn-xs btn-primary" tooltip="Add Streaming Table"  ng-if="userService.hasRole('ROLE_ADMIN')"  ng-click="openStreamingSourceModal()"><i class="fa fa-area-chart"></i></a>
             </div>
@@ -47,5 +48,30 @@
     </div>
 </div>
 
+<script type="text/ng-template" id="addHiveTableFromTree.html">
+  <div class="modal-header"><button class="close" type="button" data-dismiss="modal" ng-click="cancel()">×</button>
+    <h4>Load Hive Table Metadata From Tree</h4>
+  </div>
+  <div class="modal-body">
+    <span><strong>Project: </strong>{{ $parent.projectName!=null?$parent.projectName:'NULL'}}</span>
+    <div class="form-group searchBox">
+      <input type="text" placeholder="Filter ..." class="nav-search-input" ng-model="predicate" />
+    </div>
+    <loading ng-if="!hiveLoaded" text="Loading Databases..."></loading>
+    <treecontrol class="tree-light check" tree-model="treedata" selected-nodes="selectedNodes" filter-expression="predicate" on-selection="showSelected(node)" on-node-toggle="showToggle(node)" options="treeOptions">
+      <div ng-if="node.label==''&&node.id==0"><img src="image/ajax-loader.gif">Loading Tables...</div>
+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showMoreClicked($parentNode)">Show More</button>
+      <button class="btn btn-xs btn-primary" ng-if="node.label==''&&node.id==65535" ng-click="showAllClicked($parentNode)">Show All</button>
+      {{node.label}}
+    </treecontrol>
+  </div>
+
+  <div class="modal-footer">
+    <button class="btn btn-primary" ng-click="add()">Sync</button>
+    <button class="btn btn-primary" ng-click="cancel()">Cancel</button>
+  </div>
+
+</script>
+
 <div ng-include="'partials/tables/table_load.html'"></div>
 <div ng-include="'partials/tables/table_unload.html'"></div>

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/bower.json
----------------------------------------------------------------------
diff --git a/webapp/bower.json b/webapp/bower.json
index 41144f9..bba4a52 100755
--- a/webapp/bower.json
+++ b/webapp/bower.json
@@ -32,7 +32,8 @@
     "bootstrap-sweetalert": "~0.4.3",
     "angular-toggle-switch":"1.3.0",
     "angular-ui-select": "0.13.2",
-    "angular-sanitize": "1.2.18"
+    "angular-sanitize": "1.2.18",
+    "angular-tree-control": "0.2.8"
   },
   "devDependencies": {
     "less.js": "~1.4.0",

http://git-wip-us.apache.org/repos/asf/kylin/blob/bc7d4f58/webapp/grunt.json
----------------------------------------------------------------------
diff --git a/webapp/grunt.json b/webapp/grunt.json
index 3219b5e..86ad1dc 100755
--- a/webapp/grunt.json
+++ b/webapp/grunt.json
@@ -19,7 +19,6 @@
                 "app/components/angularLocalStorage/src/angularLocalStorage.js",
                 "app/components/angular-base64/angular-base64.min.js",
                 "app/components/ng-grid/build/ng-grid.js",
-                "app/components/angular-tree-control/angular-tree-control.js",
                 "app/components/ace-builds/src-min-noconflict/ace.js",
                 "app/components/ace-builds/src-min-noconflict/ext-language_tools.js",
                 "app/components/ace-builds/src-min-noconflict/mode-json.js",


[40/43] kylin git commit: KYLIN-1421 Cube "source record" is always zero for streaming

Posted by sh...@apache.org.
KYLIN-1421 Cube "source record" is always zero for streaming


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5c2c64ff
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5c2c64ff
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5c2c64ff

Branch: refs/heads/helix-rebase
Commit: 5c2c64ff1f18b72512ae3e1263240c08041b184e
Parents: 5774a2e
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 10:30:05 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5c2c64ff/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index 20ff01d..d373e4a 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -116,6 +116,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
         try {
             CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
             segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
+            segment.setInputRecords(streamingBatch.getMessages().size());
             return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);


[33/43] kylin git commit: KYLIN-1420 Query returns empty result on partition column's boundary condition

Posted by sh...@apache.org.
KYLIN-1420 Query returns empty result on partition column's boundary condition


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/036a75cb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/036a75cb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/036a75cb

Branch: refs/heads/helix-rebase
Commit: 036a75cbdb9a217859a13479b40846f4fa1ad749
Parents: efd0740
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 15 15:52:15 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/036a75cb/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
index a72426d..d314dde 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRangePlanner.java
@@ -108,8 +108,8 @@ public class GTScanRangePlanner {
         for (ColumnRange range : andDimRanges) {
             if (partitionColRef != null && range.column.equals(partitionColRef)) {
                 if (rangeStartEndComparator.comparator.compare(segmentStartAndEnd.getFirst(), range.end) <= 0 //
-                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) < 0) {
-                    //segment range is [Closed,Open)
+                        && rangeStartEndComparator.comparator.compare(range.begin, segmentStartAndEnd.getSecond()) <= 0) {
+                    //segment range is [Closed,Open), but segmentStartAndEnd.getSecond() might be rounded, so use <=. 
                 } else {
                     logger.debug("Pre-check partition col filter failed, partitionColRef {}, segment start {}, segment end {}, range begin {}, range end {}",//
                             new Object[] { partitionColRef, makeReadable(segmentStartAndEnd.getFirst()), makeReadable(segmentStartAndEnd.getSecond()), makeReadable(range.begin), makeReadable(range.end) });


[19/43] kylin git commit: rebase 2.x-staging

Posted by sh...@apache.org.
rebase 2.x-staging


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/40228682
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/40228682
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/40228682

Branch: refs/heads/helix-rebase
Commit: 40228682aa3a4f5a37d3299d5489d73368077745
Parents: daeaf08
Author: shaofengshi <sh...@apache.org>
Authored: Wed Dec 30 14:18:06 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:18 2016 +0800

----------------------------------------------------------------------
 build/bin/kylin.sh                              | 10 +++++
 .../test_case_data/sandbox/kylin.properties     |  1 +
 .../java/org/apache/kylin/rest/DebugTomcat.java |  3 ++
 .../kylin/rest/controller/JobController.java    | 44 ++++++++++----------
 4 files changed, 37 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/40228682/build/bin/kylin.sh
----------------------------------------------------------------------
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index f4d9fac..074acce 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -73,6 +73,15 @@ then
     mkdir -p ${KYLIN_HOME}/ext
     export HBASE_CLASSPATH=$hive_dependency:${KYLIN_HOME}/lib/*:${KYLIN_HOME}/ext/*:${HBASE_CLASSPATH}
 
+    if [ -z "$KYLIN_REST_ADDRESS" ]
+    then
+        kylin_rest_address=`hostname`":"`grep "<Connector port=" ${tomcat_root}/conf/server.xml |grep protocol=\"HTTP/1.1\" | cut -d '=' -f 2 | cut -d \" -f 2`
+        echo "KYLIN_REST_ADDRESS not found, will use ${kylin_rest_address}"
+    else
+        echo "KYLIN_REST_ADDRESS is set to: $KYLIN_REST_ADDRESS"
+        kylin_rest_address=$KYLIN_REST_ADDRESS
+    fi
+
     #debug if encounter NoClassDefError
     #hbase classpath
 
@@ -88,6 +97,7 @@ then
     -Djava.io.tmpdir=${tomcat_root}/temp  \
     -Dkylin.hive.dependency=${hive_dependency} \
     -Dkylin.hbase.dependency=${hbase_dependency} \
+    -Dkylin.rest.address=${kylin_rest_address} \
     -Dspring.profiles.active=${spring_profile} \
     org.apache.hadoop.util.RunJar ${tomcat_root}/bin/bootstrap.jar  org.apache.catalina.startup.Bootstrap start >> ${KYLIN_HOME}/logs/kylin.out 2>&1 & echo $! > ${KYLIN_HOME}/pid &
     echo "A new Kylin instance is started by $USER, stop it using \"kylin.sh stop\""

http://git-wip-us.apache.org/repos/asf/kylin/blob/40228682/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 7c9919b..0d89b8c 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -4,6 +4,7 @@
 # currently it will be attached to each kylin's htable attribute
 kylin.owner=whoami@kylin.apache.org
 
+kylin.zookeeper.address=sandbox:2181
 # List of web servers in use, this enables one web server instance to sync up with other servers.
 kylin.rest.servers=localhost:7070
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/40228682/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
index c0f61d2..139cddc 100644
--- a/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
+++ b/server/src/main/java/org/apache/kylin/rest/DebugTomcat.java
@@ -30,6 +30,7 @@ import org.apache.catalina.startup.Tomcat;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.HostnameUtils;
 import org.apache.kylin.rest.util.ClasspathUtil;
 
 public class DebugTomcat {
@@ -45,6 +46,8 @@ public class DebugTomcat {
 
             System.setProperty("spring.profiles.active", "testing");
 
+            System.setProperty("kylin.rest.address", HostnameUtils.getHostname() + ":" + "7070");
+
             //avoid log permission issue
             if (System.getProperty("catalina.home") == null)
                 System.setProperty("catalina.home", ".");

http://git-wip-us.apache.org/repos/asf/kylin/blob/40228682/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 5c835ac..4d0824a 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -26,6 +26,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import joptsimple.internal.Strings;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.constant.JobStatusEnum;
@@ -76,28 +79,15 @@ public class JobController extends BasicController implements InitializingBean {
         TimeZone tzone = TimeZone.getTimeZone(timeZone);
         TimeZone.setDefault(tzone);
 
-        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        String serverMode = kylinConfig.getServerMode();
-
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
-            logger.info("Initializing Job Engine ....");
-
-            new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    try {
-                        DefaultScheduler scheduler = DefaultScheduler.getInstance();
-                        scheduler.init(new JobEngineConfig(kylinConfig), jobLock);
-                        while (!scheduler.hasStarted()) {
-                            logger.error("scheduler has not been started");
-                            Thread.sleep(1000);
-                        }
-                    } catch (Exception e) {
-                        throw new RuntimeException(e);
-                    }
-                }
-            }).start();
+        if (System.getProperty("kylin.rest.address") == null) {
+            throw new RuntimeException("There is no -Dkylin.rest.address set; Please check bin/kylin.sh");
         }
+
+        final String restAddress = System.getProperty("kylin.rest.address");
+        final String hostname = Preconditions.checkNotNull(restAddress.substring(0, restAddress.lastIndexOf(":")));
+        final String port = Preconditions.checkNotNull(restAddress.substring(restAddress.lastIndexOf(":") + 1));
+        final String instanceName = hostname + "_" + port;
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
     /**
@@ -209,4 +199,16 @@ public class JobController extends BasicController implements InitializingBean {
         this.jobService = jobService;
     }
 
+    private void updateKylinConfig(List<String> instances) {
+        List<String> instanceRestAddresses = Lists.newArrayList();
+        for (String instanceName : instances) {
+            int indexOfUnderscore = instanceName.lastIndexOf("_");
+            instanceRestAddresses.add(instanceName.substring(0, indexOfUnderscore) + ":" + instanceName.substring(indexOfUnderscore + 1));
+        }
+        String restServersInCluster = Strings.join(instanceRestAddresses, ",");
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.rest.servers", restServersInCluster);
+        System.setProperty("kylin.rest.servers", restServersInCluster);
+
+    }
+
 }


[04/43] kylin git commit: KYLIN-1323 give “kylin.hbase.hfile.size.gb” a default value

Posted by sh...@apache.org.
KYLIN-1323 give “kylin.hbase.hfile.size.gb” a default value


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7df1cc74
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7df1cc74
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7df1cc74

Branch: refs/heads/helix-rebase
Commit: 7df1cc74dcd9eda6d315cb9b7af5da9b9007bc9a
Parents: 0f48f10
Author: sunyerui <su...@gmail.com>
Authored: Mon Feb 29 12:12:15 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Mon Feb 29 12:12:15 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                                        | 2 +-
 .../src/main/java/org/apache/kylin/common/KylinConfigBase.java     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/7df1cc74/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index b220b2d..5532339 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -66,7 +66,7 @@ kylin.hbase.region.cut.medium=10
 kylin.hbase.region.cut.large=50
 
 # The hfile size of GB, smaller hfile leading to the converting hfile MR has more reducers and be faster
-# set to 0 or comment this config to disable this optimization
+# set 0 to disable this optimization
 kylin.hbase.hfile.size.gb=5
 
 # Enable/disable ACL check for cube query

http://git-wip-us.apache.org/repos/asf/kylin/blob/7df1cc74/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 3430e0b..0cee9f8 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -412,7 +412,7 @@ public class KylinConfigBase implements Serializable {
     }
 
     public int getHBaseHFileSizeGB() {
-        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "0"));
+        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "5"));
     }
 
     public int getScanThreshold() {


[22/43] kylin git commit: KYLIN-1311 write rest servers to file

Posted by sh...@apache.org.
KYLIN-1311 write rest servers to file


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ebef971b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ebef971b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ebef971b

Branch: refs/heads/helix-rebase
Commit: ebef971bc85262e38561fb494af9ccef0af2a47a
Parents: 1c4deab
Author: shaofengshi <sh...@apache.org>
Authored: Sun Jan 24 21:41:56 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 build/bin/streaming_build.sh                    |  9 ++++--
 build/bin/streaming_fillgap.sh                  |  8 +++--
 .../org/apache/kylin/common/KylinConfig.java    | 31 ++++++++++++++++++++
 .../kylin/rest/helix/HelixClusterAdmin.java     | 27 ++++++++++-------
 4 files changed, 58 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ebef971b/build/bin/streaming_build.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_build.sh b/build/bin/streaming_build.sh
index a96ecc1..20b87d8 100644
--- a/build/bin/streaming_build.sh
+++ b/build/bin/streaming_build.sh
@@ -20,14 +20,17 @@
 source /etc/profile
 source ~/.bash_profile
 
-STREAMING=$1
+CUBE_NAME=$1
 INTERVAL=$2
 DELAY=$3
+MARGIN=$4
+AUTHORIZATION=$5
+KYLIN_HOST=$6
 CURRENT_TIME_IN_SECOND=`date +%s`
 CURRENT_TIME=$((CURRENT_TIME_IN_SECOND * 1000))
 START=$(($CURRENT_TIME - CURRENT_TIME%INTERVAL - DELAY))
 END=$(($CURRENT_TIME - CURRENT_TIME%INTERVAL - DELAY + INTERVAL))
 
 ID="$START"_"$END"
-echo "building for ${ID}" >> ${KYLIN_HOME}/logs/build_trace.log
-sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${STREAMING} ${ID} -start ${START} -end ${END} -streaming ${STREAMING}
\ No newline at end of file
+echo "building for ${CUBE_NAME} ${ID}" >> ${KYLIN_HOME}/logs/build_trace.log
+curl --request PUT --data "{\"start\": $START, \"end\": $END }" --header "Authorization: Basic $AUTHORIZATION" --header "Content-Type: application/json" -v ${KYLIN_HOST}/kylin/api/streaming/${CUBE_NAME}/build

http://git-wip-us.apache.org/repos/asf/kylin/blob/ebef971b/build/bin/streaming_fillgap.sh
----------------------------------------------------------------------
diff --git a/build/bin/streaming_fillgap.sh b/build/bin/streaming_fillgap.sh
index 74d9037..31c4886 100644
--- a/build/bin/streaming_fillgap.sh
+++ b/build/bin/streaming_fillgap.sh
@@ -20,8 +20,10 @@
 source /etc/profile
 source ~/.bash_profile
 
-streaming=$1
-margin=$2
+CUBE_NAME=$1
+AUTHORIZATION=$2
+KYLIN_HOST=$3
 
 cd ${KYLIN_HOME}
-sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${streaming} fillgap -streaming ${streaming} -fillGap true -margin ${margin}
\ No newline at end of file
+#sh ${KYLIN_HOME}/bin/kylin.sh streaming start ${streaming} fillgap -streaming ${streaming} -fillGap true -margin ${margin}
+curl --request PUT --header "Authorization: Basic $AUTHORIZATION" --header "Content-Type: application/json" -v ${KYLIN_HOST}/kylin/api/streaming/${CUBE_NAME}/fillgap

http://git-wip-us.apache.org/repos/asf/kylin/blob/ebef971b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 81f5827..08fb6dd 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -265,4 +265,35 @@ public class KylinConfig extends KylinConfigBase {
         }
     }
 
+    public static void writeOverrideProperties(Properties properties) throws IOException {
+        File propFile = getKylinProperties();
+        File overrideFile = new File(propFile.getParentFile(), propFile.getName() + ".override");
+        overrideFile.createNewFile();
+        FileInputStream fis2 = null;
+        Properties override = new Properties();
+        try {
+            fis2 = new FileInputStream(overrideFile);
+            override.load(fis2);
+            for (Map.Entry<Object, Object> entries : properties.entrySet()) {
+                override.setProperty(entries.getKey().toString(), entries.getValue().toString());
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } finally {
+            IOUtils.closeQuietly(fis2);
+        }
+
+        PrintWriter pw = null;
+        try {
+            pw = new PrintWriter(overrideFile);
+            for (Enumeration e = override.propertyNames(); e.hasMoreElements();) {
+                String key = (String) e.nextElement();
+                pw.println(key + "=" + override.getProperty(key));
+            }
+            pw.close();
+        } finally {
+            IOUtils.closeQuietly(pw);
+        }
+        
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ebef971b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 0758ef1..4da9a86 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -40,8 +40,10 @@ import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
@@ -101,7 +103,7 @@ public class HelixClusterAdmin {
         addInstance(instanceName, instanceTags);
         startInstance(instanceName);
 
-        rebalanceWithTag(instanceTags);
+        rebalanceWithTag(RESOURCE_NAME_JOB_ENGINE, TAG_JOB_ENGINE);
 
         boolean startController = kylinConfig.isClusterController();
         if (startController) {
@@ -123,7 +125,7 @@ public class HelixClusterAdmin {
 
         // add job engine as a resource, 1 partition
         if (!admin.getResourcesInCluster(clusterName).contains(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE)) {
-            admin.addResource(clusterName, HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
+            admin.addResource(clusterName, HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.FULL_AUTO.name());
         }
 
     }
@@ -134,8 +136,8 @@ public class HelixClusterAdmin {
             logger.warn("Resource '" + resourceName + "' already exists in cluster, remove and re-add.");
             admin.dropResource(clusterName, resourceName);
         }
-        admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
-        admin.rebalance(clusterName, resourceName, 2, "", TAG_STREAM_BUILDER);
+        admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.FULL_AUTO.name());
+        rebalanceWithTag(resourceName, TAG_STREAM_BUILDER);
 
     }
 
@@ -161,13 +163,9 @@ public class HelixClusterAdmin {
      * Rebalance the resource with the tags
      * @param tags
      */
-    protected void rebalanceWithTag(List<String> tags) {
-        for (String tag : tags) {
-            if (tag.equals(TAG_JOB_ENGINE)) {
-                List<String> instances = admin.getInstancesInClusterWithTag(clusterName, TAG_JOB_ENGINE);
-                admin.rebalance(clusterName, RESOURCE_NAME_JOB_ENGINE, instances.size(), "", tag);
-            }
-        }
+    protected void rebalanceWithTag(String resourceName, String tag) {
+        List<String> instances = admin.getInstancesInClusterWithTag(clusterName, tag);
+        admin.rebalance(clusterName, resourceName, instances.size(), "", tag);
     }
 
     /**
@@ -277,6 +275,13 @@ public class HelixClusterAdmin {
                 kylinConfig.setProperty("kylin.rest.servers", restServersInCluster);
                 System.setProperty("kylin.rest.servers", restServersInCluster);
                 logger.info("kylin.rest.servers update to " + restServersInCluster);
+                Properties properties = new Properties();
+                properties.setProperty("kylin.rest.servers", restServersInCluster);
+                try {
+                    KylinConfig.writeOverrideProperties(properties);
+                } catch (IOException e) {
+                    logger.error(e.getMessage(), e);
+                }
                 Broadcaster.clearCache();
             }
         }


[02/43] kylin git commit: KYLIN-1323 Improve performance of converting data to hfile

Posted by sh...@apache.org.
KYLIN-1323 Improve performance of converting data to hfile


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ab4d8909
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ab4d8909
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ab4d8909

Branch: refs/heads/helix-rebase
Commit: ab4d8909ac85af87d8b8b443044c49a79f9e3ee4
Parents: 66294d3
Author: sunyerui <su...@gmail.com>
Authored: Sun Feb 28 21:02:25 2016 +0800
Committer: sunyerui <su...@gmail.com>
Committed: Sun Feb 28 21:12:28 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |   4 +
 .../apache/kylin/common/KylinConfigBase.java    |   8 ++
 .../engine/mr/common/AbstractHadoopJob.java     |   2 +-
 .../kylin/engine/mr/common/BatchConstants.java  |   1 +
 .../mr/steps/RangeKeyDistributionJob.java       | 115 ----------------
 .../mr/steps/RangeKeyDistributionMapper.java    |  71 ----------
 .../mr/steps/RangeKeyDistributionReducer.java   | 100 --------------
 kylin-it/pom.xml                                |   3 +
 .../kylin/provision/BuildCubeWithEngine.java    |  41 ++++++
 .../kylin/storage/hbase/steps/CubeHFileJob.java |  37 ++++-
 .../kylin/storage/hbase/steps/HBaseMRSteps.java |   4 +-
 .../hbase/steps/RangeKeyDistributionJob.java    | 127 +++++++++++++++++
 .../hbase/steps/RangeKeyDistributionMapper.java |  76 +++++++++++
 .../steps/RangeKeyDistributionReducer.java      | 136 +++++++++++++++++++
 .../hbase/util/HBaseRegionSizeCalculator.java   |   8 ++
 .../steps/RangeKeyDistributionJobTest.java      |   1 -
 .../steps/RangeKeyDistributionMapperTest.java   |   1 -
 .../steps/RangeKeyDistributionReducerTest.java  |   1 -
 18 files changed, 443 insertions(+), 293 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 44a282e..b220b2d 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -65,6 +65,10 @@ kylin.hbase.region.cut.small=5
 kylin.hbase.region.cut.medium=10
 kylin.hbase.region.cut.large=50
 
+# The hfile size of GB, smaller hfile leading to the converting hfile MR has more reducers and be faster
+# set to 0 or comment this config to disable this optimization
+kylin.hbase.hfile.size.gb=5
+
 # Enable/disable ACL check for cube query
 kylin.query.security.enabled=true
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 7707684..3430e0b 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -407,6 +407,14 @@ public class KylinConfigBase implements Serializable {
         return Integer.parseInt(getOptional("kylin.hbase.region.count.max", "500"));
     }
 
+    public void setHBaseHFileSizeGB(int size) {
+        setProperty("kylin.hbase.hfile.size.gb", String.valueOf(size));
+    }
+
+    public int getHBaseHFileSizeGB() {
+        return Integer.parseInt(getOptional("kylin.hbase.hfile.size.gb", "0"));
+    }
+
     public int getScanThreshold() {
         return Integer.parseInt(getOptional("kylin.query.scan.threshold", "10000000"));
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index 7615269..e4eee96 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -83,7 +83,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
     protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName("inputformat").hasArg().isRequired(false).withDescription("Input format").create("inputformat");
     protected static final Option OPTION_OUTPUT_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Output path").create("output");
     protected static final Option OPTION_NCUBOID_LEVEL = OptionBuilder.withArgName("level").hasArg().isRequired(true).withDescription("N-Cuboid build level, e.g. 1, 2, 3...").create("level");
-    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Partition file path.").create("input");
+    protected static final Option OPTION_PARTITION_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("Partition file path.").create("partitions");
     protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName("htable name").hasArg().isRequired(true).withDescription("HTable name").create("htablename");
 
     protected static final Option OPTION_STATISTICS_ENABLED = OptionBuilder.withArgName("statisticsenabled").hasArg().isRequired(false).withDescription("Statistics enabled").create("statisticsenabled");

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
index 400a3aa..6943f18 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/BatchConstants.java
@@ -42,6 +42,7 @@ public interface BatchConstants {
     String REGION_NUMBER_MIN = "region.number.min";
     String REGION_NUMBER_MAX = "region.number.max";
     String REGION_SPLIT_SIZE = "region.split.size";
+    String HFILE_SIZE_GB = "hfile.size.gb";
     
     String CFG_KYLIN_LOCAL_TEMP_DIR = "/tmp/kylin/";
     String CFG_KYLIN_HDFS_TEMP_DIR = "/tmp/kylin/";

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
deleted file mode 100644
index 5632fc1..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionJob.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.metadata.model.DataModelDesc;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author xjiang, ysong1
- * 
- */
-
-public class RangeKeyDistributionJob extends AbstractHadoopJob {
-    protected static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionJob.class);
-
-    /*
-     * (non-Javadoc)
-     * 
-     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
-     */
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        try {
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_OUTPUT_PATH);
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(OPTION_CUBE_NAME);
-
-            parseOptions(options, args);
-
-            // start job
-            String jobName = getOptionValue(OPTION_JOB_NAME);
-            job = Job.getInstance(getConf(), jobName);
-
-            setJobClasspath(job);
-
-            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
-
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            FileOutputFormat.setOutputPath(job, output);
-            // job.getConfiguration().set("dfs.block.size", "67108864");
-
-            // Mapper
-            job.setInputFormatClass(SequenceFileInputFormat.class);
-            job.setMapperClass(RangeKeyDistributionMapper.class);
-            job.setMapOutputKeyClass(Text.class);
-            job.setMapOutputValueClass(LongWritable.class);
-
-            // Reducer - only one
-            job.setReducerClass(RangeKeyDistributionReducer.class);
-            job.setOutputFormatClass(SequenceFileOutputFormat.class);
-            job.setOutputKeyClass(Text.class);
-            job.setOutputValueClass(LongWritable.class);
-            job.setNumReduceTasks(1);
-
-            this.deletePath(job.getConfiguration(), output);
-
-            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
-            CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
-            CubeInstance cube = cubeMgr.getCube(cubeName);
-            DataModelDesc.RealizationCapacity cubeCapacity = cube.getDescriptor().getModel().getCapacity();
-            int regionSplitSize = KylinConfig.getInstanceFromEnv().getHBaseRegionCut(cubeCapacity.toString());
-            int maxRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMax();
-            int minRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMin();
-            
-            job.getConfiguration().set(BatchConstants.REGION_SPLIT_SIZE, String.valueOf(regionSplitSize));
-            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MAX, String.valueOf(maxRegionCount));
-            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MIN, String.valueOf(minRegionCount));
-            
-            return waitForCompletion(job);
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new RangeKeyDistributionJob(), args);
-        System.exit(exitCode);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
deleted file mode 100644
index 47cbc95..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionMapper.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.kylin.engine.mr.KylinMapper;
-
-/**
- * @author ysong1
- * 
- */
-public class RangeKeyDistributionMapper extends KylinMapper<Text, Text, Text, LongWritable> {
-
-    private static final long ONE_MEGA_BYTES = 1L * 1024L * 1024L;
-
-    private LongWritable outputValue = new LongWritable(0);
-
-    private long bytesRead = 0;
-
-    private Text lastKey;
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-    }
-
-    @Override
-    public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
-        lastKey = key;
-
-        int bytesLength = key.getLength() + value.getLength();
-        bytesRead += bytesLength;
-
-        if (bytesRead >= ONE_MEGA_BYTES) {
-            outputValue.set(bytesRead);
-            context.write(key, outputValue);
-
-            // reset bytesRead
-            bytesRead = 0;
-        }
-
-    }
-
-    @Override
-    protected void cleanup(Context context) throws IOException, InterruptedException {
-        if (lastKey != null) {
-            outputValue.set(bytesRead);
-            context.write(lastKey, outputValue);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
deleted file mode 100644
index 68be74e..0000000
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RangeKeyDistributionReducer.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.engine.mr.steps;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.kylin.engine.mr.KylinReducer;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author ysong1
- * 
- */
-public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable, Text, LongWritable> {
-
-    public static final long ONE_GIGA_BYTES = 1024L * 1024L * 1024L;
-    private static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionReducer.class);
-
-    private LongWritable outputValue = new LongWritable(0);
-
-    private int minRegionCount = 1;
-    private int maxRegionCount = 500;
-    private int cut = 10;
-    private long bytesRead = 0;
-    private List<Text> gbPoints = new ArrayList<Text>();
-
-    @Override
-    protected void setup(Context context) throws IOException {
-        super.bindCurrentConfiguration(context.getConfiguration());
-
-        if (context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE) != null) {
-            cut = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE));
-        }
-
-        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN) != null) {
-            minRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN));
-        }
-
-        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX) != null) {
-            maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX));
-        }
-
-        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount + ", min region count =" + minRegionCount);
-    }
-
-    @Override
-    public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
-        for (LongWritable v : values) {
-            bytesRead += v.get();
-        }
-
-        if (bytesRead >= ONE_GIGA_BYTES) {
-            gbPoints.add(new Text(key));
-            bytesRead = 0; // reset bytesRead
-        }
-    }
-
-    @Override
-    protected void cleanup(Context context) throws IOException, InterruptedException {
-        int nRegion = Math.round((float) gbPoints.size() / (float) cut);
-        nRegion = Math.max(minRegionCount, nRegion);
-        nRegion = Math.min(maxRegionCount, nRegion);
-
-        int gbPerRegion = gbPoints.size() / nRegion;
-        gbPerRegion = Math.max(1, gbPerRegion);
-
-        System.out.println(nRegion + " regions");
-        System.out.println(gbPerRegion + " GB per region");
-
-        for (int i = gbPerRegion; i < gbPoints.size(); i += gbPerRegion) {
-            Text key = gbPoints.get(i);
-            outputValue.set(i);
-            System.out.println(StringUtils.byteToHexString(key.getBytes()) + "\t" + outputValue.get());
-            context.write(key, outputValue);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index 6cb44a5..99b650c 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -301,6 +301,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>
@@ -321,6 +322,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>
@@ -341,6 +343,7 @@
                                     <classpathScope>test</classpathScope>
                                     <executable>java</executable>
                                     <arguments>
+                                        <argument>-DuseSandbox=true</argument>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-classpath</argument>

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 28808df..cfefef3 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.text.SimpleDateFormat;
 import java.util.List;
+import java.util.Map;
 import java.util.TimeZone;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -36,11 +37,14 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractKylinTestCase;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
@@ -55,6 +59,8 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.metadata.model.IEngineAware;
+import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 
@@ -143,8 +149,10 @@ public class BuildCubeWithEngine {
 
     public void build() throws Exception {
         DeployUtil.prepareTestDataForNormalCubes("test_kylin_cube_with_slr_left_join_empty");
+        KylinConfig.getInstanceFromEnv().setHBaseHFileSizeGB(1);
         testInner();
         testLeft();
+        KylinConfig.getInstanceFromEnv().setHBaseHFileSizeGB(0);
     }
 
     protected void waitForJob(String jobId) {
@@ -345,6 +353,9 @@ public class BuildCubeWithEngine {
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
+        if (segment.getCubeDesc().getEngineType() == IEngineAware.ID_MR_V1) {
+            checkHFilesInHBase(segment);
+        }
         return job.getId();
     }
 
@@ -355,4 +366,34 @@ public class BuildCubeWithEngine {
         return exitCode;
     }
 
+    private void checkHFilesInHBase(CubeSegment segment) throws IOException {
+        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+        String tableName = segment.getStorageLocationIdentifier();
+        HTable table = new HTable(conf, tableName);
+        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        long totalSize = 0;
+        for (Long size : sizeMap.values()) {
+            totalSize += size;
+        }
+        if (totalSize == 0) {
+            return;
+        }
+        Map<byte[], Pair<Integer, Integer>> countMap = cal.getRegionHFileCountMap();
+        // check if there's region contains more than one hfile, which means the hfile config take effects
+        boolean hasMultiHFileRegions = false;
+        for (Pair<Integer, Integer> count : countMap.values()) {
+            // check if hfile count is greater than store count
+            if (count.getSecond() > count.getFirst()) {
+                hasMultiHFileRegions = true;
+                break;
+            }
+        }
+        if (KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB() == 0 && hasMultiHFileRegions) {
+            throw new IOException("hfile size set to 0, but found region contains more than one hfiles");
+        } else if (KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB() > 0 && !hasMultiHFileRegions) {
+            throw new IOException("hfile size set greater than 0, but all regions still has only one hfile");
+        }
+    }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
index 1f0b1a0..a302daf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
@@ -18,17 +18,24 @@
 
 package org.apache.kylin.storage.hbase.steps;
 
+import java.io.IOException;
+
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
@@ -51,11 +58,14 @@ public class CubeHFileJob extends AbstractHadoopJob {
         try {
             options.addOption(OPTION_JOB_NAME);
             options.addOption(OPTION_CUBE_NAME);
+            options.addOption(OPTION_PARTITION_FILE_PATH);
             options.addOption(OPTION_INPUT_PATH);
             options.addOption(OPTION_OUTPUT_PATH);
             options.addOption(OPTION_HTABLE_NAME);
             parseOptions(options, args);
 
+            Path partitionFilePath = new Path(getOptionValue(OPTION_PARTITION_FILE_PATH));
+
             Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
             String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
 
@@ -82,8 +92,9 @@ public class CubeHFileJob extends AbstractHadoopJob {
             String tableName = getOptionValue(OPTION_HTABLE_NAME).toUpperCase();
             HTable htable = new HTable(conf, tableName);
 
-            //Automatic config !
+            // Automatic config !
             HFileOutputFormat.configureIncrementalLoad(job, htable);
+            reconfigurePartitions(conf, partitionFilePath);
 
             // set block replication to 3 for hfiles
             conf.set(DFSConfigKeys.DFS_REPLICATION_KEY, "3");
@@ -101,6 +112,30 @@ public class CubeHFileJob extends AbstractHadoopJob {
         }
     }
 
+    /**
+     * Check if there's partition files for hfile, if yes replace the table splits, to make the job more reducers
+     * @param conf the job configuration
+     * @param path the hfile partition file
+     * @throws IOException
+     */
+    @SuppressWarnings("deprecation")
+    private void reconfigurePartitions(Configuration conf, Path path) throws IOException {
+        FileSystem fs = path.getFileSystem(conf);
+        if (fs.exists(path)) {
+            try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf)) {
+                int partitionCount = 0;
+                Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+                Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
+                while (reader.next(key, value)) {
+                    partitionCount++;
+                }
+                TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), path);
+                // The reduce tasks should be one more than partition keys
+                job.setNumReduceTasks(partitionCount+1);
+            }
+        }
+    }
+
     public static void main(String[] args) throws Exception {
         int exitCode = ToolRunner.run(new CubeHFileJob(), args);
         System.exit(exitCode);

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index c3bd7b5..2a21640 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -12,7 +12,6 @@ import org.apache.kylin.engine.mr.common.MapReduceExecutable;
 import org.apache.kylin.storage.hbase.ii.IIBulkLoadJob;
 import org.apache.kylin.storage.hbase.ii.IICreateHFileJob;
 import org.apache.kylin.storage.hbase.ii.IICreateHTableJob;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionJob;
 import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.metadata.realization.IRealizationSegment;
@@ -72,7 +71,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
         StringBuilder cmd = new StringBuilder();
         appendExecCmdParameters(cmd, "cubename", seg.getRealization().getName());
         appendExecCmdParameters(cmd, "segmentname", seg.getName());
-        appendExecCmdParameters(cmd, "input", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
+        appendExecCmdParameters(cmd, "partitions", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000");
         appendExecCmdParameters(cmd, "statisticsenabled", String.valueOf(withStats));
 
         createHtableStep.setJobParams(cmd.toString());
@@ -90,6 +89,7 @@ public class HBaseMRSteps extends JobBuilderSupport {
 
         appendMapReduceParameters(cmd, seg.getRealization().getDataModelDesc());
         appendExecCmdParameters(cmd, "cubename", seg.getRealization().getName());
+        appendExecCmdParameters(cmd, "partitions", getRowkeyDistributionOutputPath(jobId) + "/part-r-00000_hfile");
         appendExecCmdParameters(cmd, "input", inputPath);
         appendExecCmdParameters(cmd, "output", getHFilePath(jobId));
         appendExecCmdParameters(cmd, "htablename", seg.getStorageLocationIdentifier());

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
new file mode 100644
index 0000000..2ff7356
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
+import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.apache.kylin.metadata.model.DataModelDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author xjiang, ysong1
+ * 
+ */
+
+public class RangeKeyDistributionJob extends AbstractHadoopJob {
+    protected static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionJob.class);
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
+     */
+    @Override
+    public int run(String[] args) throws Exception {
+        Options options = new Options();
+
+        try {
+            options.addOption(OPTION_INPUT_PATH);
+            options.addOption(OPTION_OUTPUT_PATH);
+            options.addOption(OPTION_JOB_NAME);
+            options.addOption(OPTION_CUBE_NAME);
+
+            parseOptions(options, args);
+
+            // start job
+            String jobName = getOptionValue(OPTION_JOB_NAME);
+            job = Job.getInstance(getConf(), jobName);
+
+            setJobClasspath(job);
+
+            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
+
+            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+            FileOutputFormat.setOutputPath(job, output);
+            // job.getConfiguration().set("dfs.block.size", "67108864");
+
+            // Mapper
+            job.setInputFormatClass(SequenceFileInputFormat.class);
+            job.setMapperClass(RangeKeyDistributionMapper.class);
+            job.setMapOutputKeyClass(Text.class);
+            job.setMapOutputValueClass(LongWritable.class);
+
+            // Reducer - only one
+            job.setReducerClass(RangeKeyDistributionReducer.class);
+            job.setOutputFormatClass(SequenceFileOutputFormat.class);
+            job.setOutputKeyClass(Text.class);
+            job.setOutputValueClass(LongWritable.class);
+            job.setNumReduceTasks(1);
+
+            this.deletePath(job.getConfiguration(), output);
+
+            String cubeName = getOptionValue(OPTION_CUBE_NAME).toUpperCase();
+            CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
+            CubeInstance cube = cubeMgr.getCube(cubeName);
+            int hfileSizeGB = KylinConfig.getInstanceFromEnv().getHBaseHFileSizeGB();
+            DataModelDesc.RealizationCapacity cubeCapacity = cube.getDescriptor().getModel().getCapacity();
+            int regionSplitSize = KylinConfig.getInstanceFromEnv().getHBaseRegionCut(cubeCapacity.toString());
+            int maxRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMax();
+            int minRegionCount = KylinConfig.getInstanceFromEnv().getHBaseRegionCountMin();
+            job.getConfiguration().set(BatchConstants.OUTPUT_PATH, output.toString());
+            job.getConfiguration().set(BatchConstants.HFILE_SIZE_GB, String.valueOf(hfileSizeGB));
+            job.getConfiguration().set(BatchConstants.REGION_SPLIT_SIZE, String.valueOf(regionSplitSize));
+            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MAX, String.valueOf(maxRegionCount));
+            job.getConfiguration().set(BatchConstants.REGION_NUMBER_MIN, String.valueOf(minRegionCount));
+            // The partition file for hfile is sequenece file consists of ImmutableBytesWritable and NullWritable
+            TableMapReduceUtil.addDependencyJars(job.getConfiguration(), ImmutableBytesWritable.class, NullWritable.class);
+
+            // Passed the sandbox property to mapper, to simulate large dataset
+            if (System.getProperty("useSandbox") != null && System.getProperty("useSandbox").equals("true")) {
+                job.getConfiguration().setBoolean("useSandbox", true);
+            }
+            
+            return waitForCompletion(job);
+        } catch (Exception e) {
+            printUsage(options);
+            throw e;
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new RangeKeyDistributionJob(), args);
+        System.exit(exitCode);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
new file mode 100644
index 0000000..6f2d2bc
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.kylin.engine.mr.KylinMapper;
+
+/**
+ * @author ysong1
+ * 
+ */
+public class RangeKeyDistributionMapper extends KylinMapper<Text, Text, Text, LongWritable> {
+
+    private static final long ONE_MEGA_BYTES = 1L * 1024L * 1024L;
+
+    private LongWritable outputValue = new LongWritable(0);
+
+    private long bytesRead = 0;
+
+    private Text lastKey;
+
+    private Long scaleFactorForSandbox = 1L;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+        if (context.getConfiguration().getBoolean("useSandbox", false)) {
+            scaleFactorForSandbox = 1024L;
+        }
+    }
+
+    @Override
+    public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
+        lastKey = key;
+
+        int bytesLength = key.getLength() + value.getLength();
+        bytesRead += bytesLength;
+
+        if ((bytesRead * scaleFactorForSandbox) >= ONE_MEGA_BYTES) {
+            outputValue.set(bytesRead * scaleFactorForSandbox);
+            context.write(key, outputValue);
+
+            // reset bytesRead
+            bytesRead = 0;
+        }
+
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+        if (lastKey != null) {
+            outputValue.set(bytesRead);
+            context.write(lastKey, outputValue);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
new file mode 100644
index 0000000..acdab62
--- /dev/null
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.storage.hbase.steps;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.kylin.engine.mr.KylinReducer;
+import org.apache.kylin.engine.mr.common.BatchConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author ysong1
+ * 
+ */
+public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable, Text, LongWritable> {
+
+    public static final long ONE_GIGA_BYTES = 1024L * 1024L * 1024L;
+    private static final Logger logger = LoggerFactory.getLogger(RangeKeyDistributionReducer.class);
+
+    private LongWritable outputValue = new LongWritable(0);
+
+    private int minRegionCount = 1;
+    private int maxRegionCount = 500;
+    private int cut = 10;
+    private int hfileSizeGB = 1;
+    private long bytesRead = 0;
+    private List<Text> gbPoints = new ArrayList<Text>();
+    private String output = null;
+
+    @Override
+    protected void setup(Context context) throws IOException {
+        super.bindCurrentConfiguration(context.getConfiguration());
+
+        if (context.getConfiguration().get(BatchConstants.OUTPUT_PATH) != null) {
+            output = context.getConfiguration().get(BatchConstants.OUTPUT_PATH);
+        }
+
+        if (context.getConfiguration().get(BatchConstants.HFILE_SIZE_GB) != null) {
+            hfileSizeGB = Integer.valueOf(context.getConfiguration().get(BatchConstants.HFILE_SIZE_GB));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE) != null) {
+            cut = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_SPLIT_SIZE));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN) != null) {
+            minRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MIN));
+        }
+
+        if (context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX) != null) {
+            maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.REGION_NUMBER_MAX));
+        }
+
+        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount
+            + ", min region count=" + minRegionCount + ", hfile size=" + hfileSizeGB);
+
+        // add empty key at position 0
+        gbPoints.add(new Text());
+    }
+
+    @Override
+    public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+        for (LongWritable v : values) {
+            bytesRead += v.get();
+        }
+
+        if (bytesRead >= ONE_GIGA_BYTES) {
+            gbPoints.add(new Text(key));
+            bytesRead = 0; // reset bytesRead
+        }
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+        int nRegion = Math.round((float) gbPoints.size() / (float) cut);
+        nRegion = Math.max(minRegionCount, nRegion);
+        nRegion = Math.min(maxRegionCount, nRegion);
+
+        int gbPerRegion = gbPoints.size() / nRegion;
+        gbPerRegion = Math.max(1, gbPerRegion);
+
+        if (hfileSizeGB <= 0) {
+            hfileSizeGB = gbPerRegion;
+        }
+        int hfilePerRegion = gbPerRegion / hfileSizeGB;
+        hfilePerRegion = Math.max(1, hfilePerRegion);
+
+        System.out.println(nRegion + " regions");
+        System.out.println(gbPerRegion + " GB per region");
+        System.out.println(hfilePerRegion + " hfile per region");
+
+        Path hfilePartitionFile = new Path(output + "/part-r-00000_hfile");
+        try (SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(
+                hfilePartitionFile.getFileSystem(context.getConfiguration()),
+                context.getConfiguration(), hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class)) {
+            int hfileCountInOneRegion = 0;
+            for (int i = hfileSizeGB; i < gbPoints.size(); i += hfileSizeGB) {
+                hfilePartitionWriter.append(new ImmutableBytesWritable(gbPoints.get(i).getBytes()), NullWritable.get());
+                if (++hfileCountInOneRegion >= hfilePerRegion) {
+                    Text key = gbPoints.get(i);
+                    outputValue.set(i);
+                    System.out.println(StringUtils.byteToHexString(key.getBytes()) + "\t" + outputValue.get());
+                    context.write(key, outputValue);
+
+                    hfileCountInOneRegion = 0;
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index ba0da00..346c3a2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -49,6 +50,8 @@ public class HBaseRegionSizeCalculator {
      **/
     private final Map<byte[], Long> sizeMap = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
 
+    private final Map<byte[], Pair<Integer, Integer>> countMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+
     static final String ENABLE_REGIONSIZECALCULATOR = "hbase.regionsizecalculator.enable";
 
     /**
@@ -93,6 +96,7 @@ public class HBaseRegionSizeCalculator {
 
                         long regionSizeBytes = regionLoad.getStorefileSizeMB() * megaByte;
                         sizeMap.put(regionId, regionSizeBytes);
+                        countMap.put(regionId, new Pair<>(regionLoad.getStores(), regionLoad.getStorefiles()));
 
                         // logger.info("Region " + regionLoad.getNameAsString()
                         // + " has size " + regionSizeBytes);
@@ -125,4 +129,8 @@ public class HBaseRegionSizeCalculator {
     public Map<byte[], Long> getRegionSizeMap() {
         return Collections.unmodifiableMap(sizeMap);
     }
+
+    public Map<byte[], Pair<Integer, Integer>> getRegionHFileCountMap() {
+        return Collections.unmodifiableMap(countMap);
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
index 7f5b24b..70e1ac7 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJobTest.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionJob;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
index ca716c3..03a3cba 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mrunit.mapreduce.MapDriver;
 import org.apache.hadoop.mrunit.types.Pair;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionMapper;
 import org.junit.Before;
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/ab4d8909/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
index cbf0657..c027c40 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducerTest.java
@@ -24,7 +24,6 @@ import java.io.IOException;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
-import org.apache.kylin.engine.mr.steps.RangeKeyDistributionReducer;
 import org.junit.Before;
 import org.junit.Test;
 


[39/43] kylin git commit: KYLIN-1417 Change to use TreeMap to allow null as value

Posted by sh...@apache.org.
KYLIN-1417 Change to use TreeMap to allow null as value


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/0ff0e6d5
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/0ff0e6d5
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/0ff0e6d5

Branch: refs/heads/helix-rebase
Commit: 0ff0e6d504188fb0edbfc9be7e4e952079172182
Parents: 542f9a2
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 10:29:47 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/source/kafka/TimedJsonStreamParser.java | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/0ff0e6d5/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
index b075387..4dccb17 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/TimedJsonStreamParser.java
@@ -35,11 +35,7 @@
 package org.apache.kylin.source.kafka;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentSkipListMap;
 
@@ -106,7 +102,7 @@ public final class TimedJsonStreamParser extends StreamingParser {
     public StreamingMessage parse(MessageAndOffset messageAndOffset) {
         try {
             Map<String, String> message = mapper.readValue(new ByteBufferBackedInputStream(messageAndOffset.message().payload()), mapType);
-            ConcurrentMap<String, String> root = new ConcurrentSkipListMap<String, String>(String.CASE_INSENSITIVE_ORDER);
+            Map<String, String> root = new TreeMap<String, String>(String.CASE_INSENSITIVE_ORDER);
             root.putAll(message);
             String tsStr = root.get(tsColName);
             //Preconditions.checkArgument(!StringUtils.isEmpty(tsStr), "Timestamp field " + tsColName + //


[16/43] kylin git commit: KYLIN-1465 log improvement on query comparision test

Posted by sh...@apache.org.
KYLIN-1465 log improvement on query comparision test


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c4d94f79
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c4d94f79
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c4d94f79

Branch: refs/heads/helix-rebase
Commit: c4d94f793f22e1c86f3f23d5f83a790b24e17966
Parents: 61f3278
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Mar 3 10:31:48 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Mar 3 11:09:26 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITCombinationTest.java   |  5 +-
 .../apache/kylin/query/ITKylinQueryTest.java    |  6 +-
 .../org/apache/kylin/query/KylinTestBase.java   | 69 ++++++++++----------
 3 files changed, 39 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
index f4bbe5f..1845587 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
@@ -36,11 +36,12 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     @BeforeClass
     public static void setUp() throws SQLException {
-        System.out.println("setUp in ITCombinationTest");
+        printInfo("setUp in ITCombinationTest");
     }
 
     @AfterClass
     public static void tearDown() {
+        printInfo("tearDown in ITCombinationTest");
         clean();
         HBaseStorage.overwriteStorageQuery = null;
     }
@@ -58,6 +59,8 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     public ITCombinationTest(String joinType, String coprocessorToggle, String queryEngine) throws Exception {
 
+        printInfo("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle + ", query engine: " + queryEngine);
+
         ITKylinQueryTest.clean();
 
         ITKylinQueryTest.joinType = joinType;

http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index fd88452..54abd4d 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -46,10 +46,9 @@ import org.junit.Test;
 public class ITKylinQueryTest extends KylinTestBase {
     private static CacheManager cacheManager;
 
-
     @BeforeClass
     public static void setUp() throws Exception {
-        printInfo("set up in KylinQueryTest");
+        printInfo("setUp in KylinQueryTest");
         joinType = "left";
 
         setupAll();
@@ -57,8 +56,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @AfterClass
     public static void tearDown() throws Exception {
-        printInfo("tearDown");
-        printInfo("Closing connection...");
+        printInfo("tearDown in KylinQueryTest");
         clean();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/c4d94f79/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index b5c6d10..c4a94ed 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -36,19 +36,24 @@ import java.sql.Statement;
 import java.sql.Timestamp;
 import java.sql.Types;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.logging.LogManager;
 
+import com.google.common.base.Throwables;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.exception.ExceptionContext;
 import org.apache.kylin.common.KylinConfig;
 import org.dbunit.Assertion;
 import org.dbunit.database.DatabaseConfig;
 import org.dbunit.database.DatabaseConnection;
 import org.dbunit.database.IDatabaseConnection;
 import org.dbunit.dataset.DataSetException;
+import org.dbunit.dataset.DefaultTable;
 import org.dbunit.dataset.ITable;
 import org.dbunit.dataset.SortedTable;
 import org.dbunit.dataset.datatype.DataType;
@@ -58,6 +63,8 @@ import org.dbunit.ext.h2.H2DataTypeFactory;
 import org.junit.Assert;
 
 import com.google.common.io.Files;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  */
@@ -96,39 +103,26 @@ public class KylinTestBase {
         }
     }
 
+    private static class FileByNameComparator implements Comparator<File> {
+        @Override
+        public int compare(File o1, File o2) {
+            return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
+        }
+    }
+
     /**
      * @param folder
-     * @param fileType
-     *            specify the interested file type by file extension
+     * @param fileType specify the interested file type by file extension
      * @return
      */
     protected static List<File> getFilesFromFolder(final File folder, final String fileType) {
-        List<File> files = new ArrayList<File>();
+        Set<File> set = new TreeSet<>(new FileByNameComparator());
         for (final File fileEntry : folder.listFiles()) {
             if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
-                files.add(fileEntry);
+                set.add(fileEntry);
             }
         }
-        return files;
-    }
-
-    protected static void getFilesFromFolderR(final String directoryStr, List<File> files, final String fileType) {
-        File folder = new File(directoryStr);
-        for (final File fileEntry : folder.listFiles()) {
-            if (fileEntry.isDirectory()) {
-                getFilesFromFolderR(fileEntry.getAbsolutePath(), files, fileType);
-            } else if (fileEntry.isFile()) {
-                if (fileEntry.getName().toLowerCase().endsWith(fileType.toLowerCase())) {
-                    files.add(fileEntry);
-                }
-            }
-        }
-    }
-
-    protected static void putTextTofile(File file, String sql) throws IOException {
-        BufferedWriter writer = new BufferedWriter(new FileWriter(file));
-        writer.write(sql, 0, sql.length());
-        writer.close();
+        return new ArrayList<>(set);
     }
 
     protected static String getTextFromFile(File file) throws IOException {
@@ -293,14 +287,6 @@ public class KylinTestBase {
         return ret;
     }
 
-    protected static void batchChangeJoinType(String targetType) throws IOException {
-        List<File> files = new LinkedList<File>();
-        getFilesFromFolderR("src/test/resources/query", files, ".sql");
-        for (File file : files) {
-            String x = changeJoinType(getTextFromFile(file), targetType);
-            putTextTofile(file, x);
-        }
-    }
 
     protected void execQueryUsingH2(String queryFolder, boolean needSort) throws Exception {
         printInfo("---------- Running H2 queries: " + queryFolder);
@@ -363,8 +349,14 @@ public class KylinTestBase {
             h2Conn.getConfig().setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new TestH2DataTypeFactory());
             ITable h2Table = executeQuery(h2Conn, queryName, sql, needSort);
 
-            // compare the result
-            Assert.assertEquals(h2Table.getRowCount(), kylinTable.getRowCount());
+
+            try {
+                // compare the result
+                Assert.assertEquals(h2Table.getRowCount(), kylinTable.getRowCount());
+            } catch (Throwable t) {
+                printInfo("execAndCompResultSize failed on: " + sqlFile.getAbsolutePath());
+                throw t;
+            }
 
             compQueryCount++;
             if (kylinTable.getRowCount() == 0) {
@@ -396,8 +388,13 @@ public class KylinTestBase {
             h2Conn.getConfig().setProperty(DatabaseConfig.PROPERTY_DATATYPE_FACTORY, new TestH2DataTypeFactory());
             ITable h2Table = executeQuery(h2Conn, queryName, sql, needSort);
 
-            // compare the result
-            Assertion.assertEquals(h2Table, kylinTable);
+            try {
+                // compare the result
+                Assertion.assertEquals(h2Table, kylinTable);
+            } catch (Throwable t) {
+                printInfo("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
+                throw t;
+            }
 
             compQueryCount++;
             if (kylinTable.getRowCount() == 0) {


[42/43] kylin git commit: KYLIN-1038 retry on job failure

Posted by sh...@apache.org.
KYLIN-1038 retry on job failure


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3bb345ea
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3bb345ea
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3bb345ea

Branch: refs/heads/helix-rebase
Commit: 3bb345ea0b47fbddd3ed4c8e86e8f71eda63b395
Parents: b922b48
Author: shaofengshi <sh...@apache.org>
Authored: Sun Feb 14 21:17:12 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 build/conf/kylin.properties                     |  3 ++
 .../apache/kylin/common/KylinConfigBase.java    |  4 ++
 .../kylin/job/execution/AbstractExecutable.java | 57 +++++++++++++-------
 .../job/execution/DefaultChainedExecutable.java |  5 ++
 4 files changed, 50 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3bb345ea/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 5a0b776..aa07e7c 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -40,6 +40,9 @@ kylin.hbase.cluster.fs=
 
 kylin.job.mapreduce.default.reduce.input.mb=500
 
+# max job retry on error, default 0: no retry
+kylin.job.retry=0
+
 # If true, job engine will not assume that hadoop CLI reside on the same server as it self
 # you will have to specify kylin.job.remote.cli.hostname, kylin.job.remote.cli.username and kylin.job.remote.cli.password
 # It should not be set to "true" unless you're NOT running Kylin.sh on a hadoop client machine 

http://git-wip-us.apache.org/repos/asf/kylin/blob/3bb345ea/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index cee92d5..15f76c7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -574,6 +574,10 @@ public class KylinConfigBase implements Serializable {
         setProperty("kylin.rest.address", restAddress);
     }
     
+    public int getJobRetry() {
+        return Integer.parseInt(this.getOptional("kylin.job.retry", "0"));
+    }
+    
     public String toString() {
         return getMetadataUrl();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/3bb345ea/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index e1d7106..8d5fea5 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -18,13 +18,10 @@
 
 package org.apache.kylin.job.execution;
 
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.kylin.common.KylinConfig;
@@ -35,10 +32,12 @@ import org.apache.kylin.job.manager.ExecutableManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
 
 /**
  */
@@ -50,6 +49,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     protected static final String END_TIME = "endTime";
 
     protected static final Logger logger = LoggerFactory.getLogger(AbstractExecutable.class);
+    protected int retry = 0;
 
     private String name;
     private String id;
@@ -99,15 +99,30 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         logger.info("Executing >>>>>>>>>>>>>   " + this.getName() + "   <<<<<<<<<<<<<");
 
         Preconditions.checkArgument(executableContext instanceof DefaultContext);
-        ExecuteResult result;
-        try {
-            onExecuteStart(executableContext);
-            result = doWork(executableContext);
-        } catch (Throwable e) {
-            logger.error("error running Executable", e);
-            onExecuteError(e, executableContext);
-            throw new ExecuteException(e);
+        ExecuteResult result = null;
+
+        onExecuteStart(executableContext);
+        Throwable exception;
+        do {
+            if (retry > 0) {
+                logger.info("Retry " + retry);
+            }
+            exception = null;
+            result = null;
+            try {
+                result = doWork(executableContext);
+            } catch (Throwable e) {
+                logger.error("error running Executable", e);
+                exception = e;
+            }
+            retry++;
+        } while (((result != null && result.succeed() == false) || exception != null) && needRetry() == true);
+        
+        if (exception != null) {
+            onExecuteError(exception, executableContext);
+            throw new ExecuteException(exception);
         }
+        
         onExecuteFinished(result, executableContext);
         return result;
     }
@@ -301,6 +316,10 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
         return status == ExecutableState.DISCARDED;
     }
 
+    protected boolean needRetry() {
+        return this.retry <= KylinConfig.getInstanceFromEnv().getJobRetry();
+    }
+
     @Override
     public String toString() {
         return Objects.toStringHelper(this).add("id", getId()).add("name", getName()).add("state", getStatus()).toString();

http://git-wip-us.apache.org/repos/asf/kylin/blob/3bb345ea/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 2e95711..7403715 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -106,6 +106,11 @@ public class DefaultChainedExecutable extends AbstractExecutable implements Chai
         return subTasks;
     }
 
+    @Override
+    protected boolean needRetry() {
+        return false;
+    }
+
     public final AbstractExecutable getTaskByName(String name) {
         for (AbstractExecutable task : subTasks) {
             if (task.getName() != null && task.getName().equalsIgnoreCase(name)) {


[36/43] kylin git commit: KYLIN-1311 fix CI

Posted by sh...@apache.org.
KYLIN-1311 fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b922b48f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b922b48f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b922b48f

Branch: refs/heads/helix-rebase
Commit: b922b48f847b0ccfaef3a7e5845dfb553b85515c
Parents: 6dd4925
Author: shaofengshi <sh...@apache.org>
Authored: Wed Feb 10 21:23:35 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../rest/controller/ClusterController.java      |  6 +++-
 .../rest/controller/StreamingController.java    |  6 ++--
 .../helix/StreamCubeBuildTransitionHandler.java | 12 +++++--
 .../apache/kylin/rest/service/CubeService.java  | 11 ++++--
 .../kylin/rest/service/StreamingService.java    | 13 ++++---
 .../rest/controller/UserControllerTest.java     | 12 ++-----
 .../kylin/rest/helix/HelixClusterAdminTest.java |  6 +++-
 .../kylin/rest/service/CacheServiceTest.java    |  2 --
 .../kylin/rest/service/ServiceTestBase.java     | 36 ++++++++------------
 .../rest/service/TestBaseWithZookeeper.java     |  9 ++---
 10 files changed, 56 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
index 86a0398..51db0a7 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/ClusterController.java
@@ -25,6 +25,7 @@ import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
+import org.apache.kylin.job.lock.JobLock;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.helix.HelixClusterAdmin;
 import org.apache.kylin.rest.request.StreamingBuildRequest;
@@ -32,6 +33,7 @@ import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
@@ -48,6 +50,8 @@ import java.util.Collection;
 public class ClusterController extends BasicController implements InitializingBean {
     private static final Logger logger = LoggerFactory.getLogger(ClusterController.class);
 
+    @Autowired
+    private JobLock jobLock;
     /*
      * (non-Javadoc)
      * 
@@ -78,7 +82,7 @@ public class ClusterController extends BasicController implements InitializingBe
                     public void run() {
                         try {
                             DefaultScheduler scheduler = DefaultScheduler.createInstance();
-                            scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
+                            scheduler.init(new JobEngineConfig(kylinConfig), jobLock);
                             if (!scheduler.hasStarted()) {
                                 logger.error("scheduler has not been started");
                                 System.exit(1);

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index e33a1c9..74b0dae 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -253,8 +253,10 @@ public class StreamingController extends BasicController {
         streamingBuildRequest.setStreaming(streamingConfig.getName());
         try {
             streamingService.buildStream(cube, streamingBuildRequest);
-        } catch (IOException e) {
-            e.printStackTrace();
+        } catch (IOException e) {  
+            logger.error("", e);
+            streamingBuildRequest.setSuccessful(false);
+            streamingBuildRequest.setMessage("Failed to submit job for " + streamingBuildRequest.getStreaming() + ", error is: " + e.getMessage());
             return streamingBuildRequest;
         }
         streamingBuildRequest.setMessage("Build request is submitted successfully.");

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
index 705d8a7..4652d0d 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
@@ -2,6 +2,7 @@ package org.apache.kylin.rest.helix;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
+import org.apache.commons.io.IOUtils;
 import org.apache.helix.NotificationContext;
 import org.apache.helix.api.TransitionHandler;
 import org.apache.helix.model.Message;
@@ -120,17 +121,22 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
 
     private void runCMD(String cmd) {
         logger.info("Executing: " + cmd);
+        BufferedReader input = null;
+        Process p = null;
         try {
             String line;
-            Process p = Runtime.getRuntime().exec(cmd);
-            BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
+            p = Runtime.getRuntime().exec(cmd);
+            input = new BufferedReader(new InputStreamReader(p.getInputStream()));
             while ((line = input.readLine()) != null) {
                 logger.info(line);
             }
-            input.close();
+
+            logger.info("Successfully start: " + cmd);
         } catch (IOException err) {
             logger.error("Error happens when running '" + cmd + "'", err);
             throw new RuntimeException(err);
+        } finally {
+            IOUtils.closeQuietly(input);
         }
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index e7411a9..b2a278a 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -590,10 +590,15 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         logger.debug("on updateOnNewSegmentReady: " + cubeName);
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        boolean isLeaderRole = true;
+        boolean isLeaderRole = false;
         if (kylinConfig.isClusterEnabled()) {
-            HelixClusterAdmin jobEngineAdmin = HelixClusterAdmin.getInstance(kylinConfig);
-            isLeaderRole = jobEngineAdmin.isLeaderRole(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE);
+            HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+            isLeaderRole = clusterAdmin.isLeaderRole(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE);
+        } else {
+            String serverMode = kylinConfig.getServerMode();
+            if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+                isLeaderRole = true;
+            }
         }
         logger.debug("server is leader role ? " + isLeaderRole);
         if (isLeaderRole == true) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index 6e732d9..28b9472 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -101,14 +101,13 @@ public class StreamingService extends BasicService {
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public void buildStream(CubeInstance cube, StreamingBuildRequest streamingBuildRequest) throws IOException {
-        HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
-        try {
-            clusterAdmin.addStreamingJob(streamingBuildRequest);
-        } catch (IOException e) {
-            logger.error("", e);
-            streamingBuildRequest.setSuccessful(false);
-            streamingBuildRequest.setMessage("Failed to submit job for " + streamingBuildRequest.getStreaming());
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        if (kylinConfig.isClusterEnabled() == false) {
+            throw new IllegalStateException("Set kylin.cluster.enabled to true to enable streaming feature.");
         }
+        
+        HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(kylinConfig);
+        clusterAdmin.addStreamingJob(streamingBuildRequest);
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java b/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
index fe0e67a..2c4a1cb 100644
--- a/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/controller/UserControllerTest.java
@@ -18,22 +18,14 @@
 
 package org.apache.kylin.rest.controller;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.kylin.rest.service.ServiceTestBase;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
-import org.springframework.security.authentication.TestingAuthenticationToken;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.GrantedAuthority;
-import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.security.core.userdetails.User;
 import org.springframework.security.core.userdetails.UserDetails;
 
+import java.io.IOException;
+
 /**
  * @author xduo
  */

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
index 1c8b779..24a8bb3 100644
--- a/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/helix/HelixClusterAdminTest.java
@@ -22,6 +22,7 @@ import org.I0Itec.zkclient.ZkClient;
 import org.I0Itec.zkclient.ZkServer;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.helix.HelixAdmin;
 import org.apache.helix.manager.zk.ZKHelixAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
@@ -44,6 +45,7 @@ public class HelixClusterAdminTest extends TestBaseWithZookeeper {
 
     HelixClusterAdmin clusterAdmin1;
     HelixClusterAdmin clusterAdmin2;
+    HelixAdmin zkHelixAdmin;
     KylinConfig kylinConfig;
 
     private static final String CLUSTER_NAME = "test_cluster";
@@ -54,7 +56,7 @@ public class HelixClusterAdminTest extends TestBaseWithZookeeper {
         kylinConfig.setRestAddress("localhost:7070");
         kylinConfig.setClusterName(CLUSTER_NAME);
         
-        final ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkAddress);
+        zkHelixAdmin = new ZKHelixAdmin(zkAddress);
         zkHelixAdmin.dropCluster(kylinConfig.getClusterName());
 
     }
@@ -117,6 +119,8 @@ public class HelixClusterAdminTest extends TestBaseWithZookeeper {
         if (clusterAdmin2 != null) {
             clusterAdmin2.stop();
         }
+
+        zkHelixAdmin.dropCluster(CLUSTER_NAME);
         
         cleanupTestMetadata();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
index 8193884..c347219 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/CacheServiceTest.java
@@ -52,8 +52,6 @@
 //
 //    private static Server server;
 //
-//    private static String ZK_ADDRESS = "localhost:2199";
-//
 //    private static KylinConfig configA;
 //    private static KylinConfig configB;
 //

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
index ca4fe39..020b2d0 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/ServiceTestBase.java
@@ -18,12 +18,6 @@
 
 package org.apache.kylin.rest.service;
 
-import com.google.common.collect.Lists;
-import org.I0Itec.zkclient.IDefaultNameSpace;
-import org.I0Itec.zkclient.ZkClient;
-import org.I0Itec.zkclient.ZkServer;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.cube.CubeDescManager;
 import org.apache.kylin.cube.CubeManager;
@@ -32,42 +26,40 @@ import org.apache.kylin.invertedindex.IIManager;
 import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.realization.RealizationRegistry;
-import org.apache.kylin.rest.helix.HelixClusterAdmin;
 import org.junit.*;
 import org.junit.runner.RunWith;
 import org.springframework.security.authentication.TestingAuthenticationToken;
 import org.springframework.security.core.Authentication;
+import org.springframework.security.core.authority.AuthorityUtils;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.core.userdetails.User;
-import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.test.context.ActiveProfiles;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
 
-import java.io.File;
-import java.util.Arrays;
-import java.util.List;
-
 /**
  * @author xduo
  */
 @RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration(locations = { "classpath:applicationContext.xml", "classpath:kylinSecurity.xml" })
+@ContextConfiguration(locations = {"classpath:applicationContext.xml", "classpath:kylinSecurity.xml"})
 @ActiveProfiles("testing")
-public class ServiceTestBase extends TestBaseWithZookeeper {
+public class ServiceTestBase extends LocalFileMetadataTestCase {
+
+    @BeforeClass
+    public static void setupResource() throws Exception {
+        staticCreateTestMetadata();
+        Authentication authentication = new TestingAuthenticationToken(new User("ADMIN", "ADMIN", AuthorityUtils.createAuthorityList("ROLE_ADMIN")), "ADMIN", "ROLE_ADMIN");
+        SecurityContextHolder.getContext().setAuthentication(authentication);
+    }
+
+    @AfterClass
+    public static void tearDownResource() {
+    }
 
     @Before
     public void setup() throws Exception {
         this.createTestMetadata();
 
-        UserService.UserGrantedAuthority userGrantedAuthority = new UserService.UserGrantedAuthority();
-        userGrantedAuthority.setAuthority("ROLE_ADMIN");
-        UserDetails user = new User("ADMIN", "skippped-ldap", Lists.newArrayList(userGrantedAuthority));
-        Authentication authentication = new TestingAuthenticationToken(user, "ADMIN", "ROLE_ADMIN");
-        SecurityContextHolder.getContext().setAuthentication(authentication);
-        KylinConfig kylinConfig = this.getTestConfig();
-        kylinConfig.setRestAddress("localhost:7070");
-
         MetadataManager.clearCache();
         CubeDescManager.clearCache();
         CubeManager.clearCache();

http://git-wip-us.apache.org/repos/asf/kylin/blob/b922b48f/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
----------------------------------------------------------------------
diff --git a/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java b/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
index 3182c16..e51e632 100644
--- a/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
+++ b/server/src/test/java/org/apache/kylin/rest/service/TestBaseWithZookeeper.java
@@ -25,9 +25,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.springframework.security.authentication.TestingAuthenticationToken;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.context.SecurityContextHolder;
 
 import java.io.File;
 
@@ -35,7 +32,7 @@ import java.io.File;
  */
 public class TestBaseWithZookeeper extends LocalFileMetadataTestCase {
     protected static final String zkAddress = "localhost:2199";
-    static ZkServer server;
+    static ZkServer server = null;
     static boolean zkStarted = false;
 
     @BeforeClass
@@ -57,13 +54,13 @@ public class TestBaseWithZookeeper extends LocalFileMetadataTestCase {
             zkStarted = true;
             System.setProperty("kylin.zookeeper.address", zkAddress);
         }
-
     }
 
     @AfterClass
     public static void tearDownResource() {
-        if (server == null) {
+        if (server != null) {
             server.shutdown();
+            server = null;
             zkStarted = false;
             System.setProperty("kylin.zookeeper.address", "");
         }


[35/43] kylin git commit: Revert "KYLIN-1415 Cube parallel merge"

Posted by sh...@apache.org.
Revert "KYLIN-1415 Cube parallel merge"

This reverts commit 8aef3cab02bc670dbabb7d1418b8e3a8a394f8fc.


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/efd07403
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/efd07403
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/efd07403

Branch: refs/heads/helix-rebase
Commit: efd0740362b610a8fb0b819820a194eede806a32
Parents: 495ad92
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 10:40:07 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java | 48 +++++++-------------
 .../apache/kylin/rest/service/CubeService.java  |  2 +-
 2 files changed, 18 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/efd07403/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 16922ac..84dd30a 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -18,11 +18,13 @@
 
 package org.apache.kylin.cube;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Multimap;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+
+import javax.annotation.Nullable;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
@@ -55,11 +57,11 @@ import org.apache.kylin.source.SourceFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Multimap;
 
 /**
  * @author yangli9
@@ -456,14 +458,8 @@ public class CubeManager implements IRealizationProvider {
     }
 
     public CubeSegment mergeSegments(CubeInstance cube, final long startDate, final long endDate, boolean forceMergeEmptySeg) throws IOException {
-        return mergeSegments(cube, startDate, endDate, forceMergeEmptySeg, true);
-    }
-
-    public CubeSegment mergeSegments(CubeInstance cube, final long startDate, final long endDate, boolean forceMergeEmptySeg, boolean strictCheck) throws IOException {
+        checkNoBuildingSegment(cube);
         checkCubeIsPartitioned(cube);
-        
-        if (strictCheck)
-            checkNoBuildingSegment(cube);
 
         Pair<Long, Long> range = alignMergeRange(cube, startDate, endDate);
         CubeSegment newSegment = newSegment(cube, range.getFirst(), range.getSecond());
@@ -623,23 +619,13 @@ public class CubeManager implements IRealizationProvider {
             return null;
         }
 
-        List<CubeSegment> readySegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.READY));
-
-        if (readySegments.size() == 0) {
-            logger.debug("Cube " + cube.getName() + " has no ready segment to merge");
+        if (cube.getBuildingSegments().size() > 0) {
+            logger.debug("Cube " + cube.getName() + " has bulding segment, will not trigger merge at this moment");
             return null;
         }
-        List<CubeSegment> buildingSegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.NEW));
-        List<CubeSegment> toSkipSegments = Lists.newArrayList();
-        for (CubeSegment building : buildingSegments) {
-            for (CubeSegment ready : readySegments) {
-                if (ready.getDateRangeStart() >= building.getDateRangeStart() && ready.getDateRangeEnd() <= building.getDateRangeEnd()) {
-                    toSkipSegments.add(ready);
-                }
-            }
-        }
 
-        readySegments.removeAll(toSkipSegments);
+        List<CubeSegment> readySegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.READY));
+
         if (readySegments.size() == 0) {
             logger.debug("Cube " + cube.getName() + " has no ready segment to merge");
             return null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/efd07403/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index abc613b..b2a278a 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -653,7 +653,7 @@ public class CubeService extends BasicService {
                     cube = getCubeManager().getCube(cubeName);
                     CubeSegment newSeg = getCubeManager().autoMergeCubeSegments(cube);
                     if (newSeg != null) {
-                        newSeg = getCubeManager().mergeSegments(cube, newSeg.getDateRangeStart(), newSeg.getDateRangeEnd(), true, false);
+                        newSeg = getCubeManager().mergeSegments(cube, newSeg.getDateRangeStart(), newSeg.getDateRangeEnd(), true);
                         logger.debug("Will submit merge job on " + newSeg);
                         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(newSeg, "SYSTEM");
                         getExecutableManager().addJob(job);


[14/43] kylin git commit: KYLIN-1456 do not display date as 1970-01-01 as default value in datepicker-popup

Posted by sh...@apache.org.
KYLIN-1456 do not display date as 1970-01-01 as default value in datepicker-popup


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a5410681
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a5410681
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a5410681

Branch: refs/heads/helix-rebase
Commit: a54106810a40c2a1065ee9b781f96fd2f1df298d
Parents: 1ea781f
Author: Jason <ji...@163.com>
Authored: Wed Mar 2 17:51:24 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Wed Mar 2 17:51:36 2016 +0800

----------------------------------------------------------------------
 webapp/app/js/directives/directives.js                 | 6 ++++++
 webapp/app/less/app.less                               | 4 ++++
 webapp/app/partials/cubeDesigner/refresh_settings.html | 2 +-
 3 files changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/js/directives/directives.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/directives/directives.js b/webapp/app/js/directives/directives.js
index d07cee1..a9cd956 100644
--- a/webapp/app/js/directives/directives.js
+++ b/webapp/app/js/directives/directives.js
@@ -228,6 +228,12 @@ KylinApp.directive('kylinPagination', function ($parse, $q) {
       require: 'ngModel',
       link: function (scope, element, attrs, ctrl) {
         ctrl.$formatters.push(function (value) {
+
+          //set null for 0
+          if(value===0){
+            return null;
+          }
+
           //return value;
           var date = new Date(value + (60000 * new Date().getTimezoneOffset()));
           return date;

http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/less/app.less
----------------------------------------------------------------------
diff --git a/webapp/app/less/app.less b/webapp/app/less/app.less
index 40d3280..b6b3131 100644
--- a/webapp/app/less/app.less
+++ b/webapp/app/less/app.less
@@ -784,3 +784,7 @@ input[placeholder] {
 input:-moz-placeholder {
   text-overflow: ellipsis;
 }
+
+.dropdown-menu{
+  z-index:9999;
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/a5410681/webapp/app/partials/cubeDesigner/refresh_settings.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/refresh_settings.html b/webapp/app/partials/cubeDesigner/refresh_settings.html
index f371c33..15dd4af 100755
--- a/webapp/app/partials/cubeDesigner/refresh_settings.html
+++ b/webapp/app/partials/cubeDesigner/refresh_settings.html
@@ -131,7 +131,7 @@
                       Please input start date when partition date column is defined in model.
                     </small>
                     <!--vier model will convert use filter-->
-                    <span ng-if="state.mode=='view'&&metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
+                    <span ng-if="state.mode=='view' && metaModel.model.partition_desc.partition_date_column!=null && metaModel.model.partition_desc.partition_date_column">{{(cubeMetaFrame.partition_date_start)|reverseToGMT0 }}</span>
                   </div>
                 </div>
               </div>


[38/43] kylin git commit: initial commit for KYLIN-1431

Posted by sh...@apache.org.
initial commit for KYLIN-1431


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/66b84a2c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/66b84a2c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/66b84a2c

Branch: refs/heads/helix-rebase
Commit: 66b84a2c7e4a353cd2cf50149a9596d47eb9677c
Parents: 7edc8b8
Author: shaofengshi <sh...@apache.org>
Authored: Wed Mar 2 11:16:46 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/job/CubeMetaExtractor.java |  13 +-
 .../kylin/metadata/model/ISourceAware.java      |   1 +
 .../kylin/engine/streaming/BootstrapConfig.java |  12 +-
 .../kylin/engine/streaming/IStreamingInput.java |   3 +-
 .../streaming/OneOffStreamingBuilder.java       |  17 +-
 .../kylin/engine/streaming/StreamingConfig.java |  57 ++---
 .../engine/streaming/StreamingManager.java      |  20 +-
 .../engine/streaming/cli/StreamingCLI.java      |  19 +-
 .../engine/streaming/util/StreamingUtils.java   |  18 +-
 .../kafka/default.streaming_table.json          |  21 ++
 .../kafka/test_streaming_table_cube.json        |  22 --
 .../streaming/default.streaming_table.json      |   6 +
 .../kylin/provision/BuildCubeWithStream.java    |  16 +-
 .../kylin/rest/controller/CubeController.java   | 234 -------------------
 .../rest/controller/StreamingController.java    |  33 ++-
 .../kylin/rest/helix/HelixClusterAdmin.java     |  21 +-
 .../helix/StreamCubeBuildTransitionHandler.java |  25 +-
 .../rest/request/StreamingBuildRequest.java     |  16 +-
 .../kylin/rest/service/StreamingService.java    |  25 +-
 .../kylin/source/kafka/KafkaStreamingInput.java |  78 ++++---
 .../kylin/source/kafka/StreamingParser.java     |   6 +-
 21 files changed, 233 insertions(+), 430 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
index 527ef0a..3fdce76 100644
--- a/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
+++ b/assembly/src/test/java/org/apache/kylin/job/CubeMetaExtractor.java
@@ -225,12 +225,11 @@ public class CubeMetaExtractor extends AbstractApplication {
         return realizationRegistry.getRealization(realizationEntry.getType(), realizationEntry.getRealization());
     }
 
-    private void dealWithStreaming(CubeInstance cube) {
-        for (StreamingConfig streamingConfig : streamingManager.listAllStreaming()) {
-            if (streamingConfig.getCubeName() != null && streamingConfig.getCubeName().equalsIgnoreCase(cube.getName())) {
-                requiredResources.add(StreamingConfig.concatResourcePath(streamingConfig.getName()));
-                requiredResources.add(KafkaConfig.concatResourcePath(streamingConfig.getName()));
-            }
+    private void dealWithStreaming(String tableName) {
+        StreamingConfig streamingConfig = streamingManager.getStreamingConfig(tableName);
+        if (streamingConfig != null) {
+            requiredResources.add(StreamingConfig.concatResourcePath(streamingConfig.getName()));
+            requiredResources.add(KafkaConfig.concatResourcePath(streamingConfig.getName()));
         }
     }
 
@@ -245,11 +244,11 @@ public class CubeMetaExtractor extends AbstractApplication {
             String modelName = cubeDesc.getModelName();
             DataModelDesc modelDesc = metadataManager.getDataModelDesc(modelName);
 
-            dealWithStreaming(cube);
 
             for (String tableName : modelDesc.getAllTables()) {
                 addRequired(requiredResources, TableDesc.concatResourcePath(tableName));
                 addOptional(optionalResources, TableDesc.concatExdResourcePath(tableName));
+                dealWithStreaming(tableName);
             }
 
             addRequired(requiredResources, DataModelDesc.concatResourcePath(modelDesc.getName()));

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
index 3d89f40..8cfda15 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
@@ -21,6 +21,7 @@ package org.apache.kylin.metadata.model;
 public interface ISourceAware {
 
     public static final int ID_HIVE = 0;
+    public static final int ID_STREAMING = 1;
     public static final int ID_SPARKSQL = 5;
 
     int getSourceType();

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
index 2b83b84..a4c4618 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/BootstrapConfig.java
@@ -4,9 +4,7 @@ package org.apache.kylin.engine.streaming;
  */
 public class BootstrapConfig {
 
-    private String streaming;
-    private int partitionId = -1;
-
+    private String cubeName;
     private long start = 0L;
     private long end = 0L;
 
@@ -28,12 +26,12 @@ public class BootstrapConfig {
         this.end = end;
     }
 
-    public String getStreaming() {
-        return streaming;
+    public String getCubeName() {
+        return cubeName;
     }
 
-    public void setStreaming(String streaming) {
-        this.streaming = streaming;
+    public void setCubeName(String cubeName) {
+        this.cubeName = cubeName;
     }
 
     public boolean isFillGap() {

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
index 1cf3d98..4b4cf02 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/IStreamingInput.java
@@ -34,11 +34,12 @@
 package org.apache.kylin.engine.streaming;
 
 import org.apache.kylin.common.util.StreamingBatch;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  */
 public interface IStreamingInput {
 
-    StreamingBatch getBatchWithTimeWindow(String streamingConfig, int id, long startTime, long endTime);
+    StreamingBatch getBatchWithTimeWindow(RealizationType realizationType, String realizationName, int id, long startTime, long endTime);
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
index 3fbade2..6bad000 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/OneOffStreamingBuilder.java
@@ -43,6 +43,7 @@ import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.TblColRef;
 
 import com.google.common.base.Preconditions;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  */
@@ -53,23 +54,25 @@ public class OneOffStreamingBuilder {
     private final StreamingBatchBuilder streamingBatchBuilder;
     private final long startTime;
     private final long endTime;
-    private final String streamingConfig;
+    private final RealizationType realizationType;
+    private final String realizationName;
 
-    public OneOffStreamingBuilder(String streamingConfig, long startTime, long endTime) {
+    public OneOffStreamingBuilder(RealizationType realizationType, String realizationName, long startTime, long endTime) {
         Preconditions.checkArgument(startTime < endTime);
         this.startTime = startTime;
         this.endTime = endTime;
-        this.streamingConfig = Preconditions.checkNotNull(streamingConfig);
-        this.streamingInput = Preconditions.checkNotNull(StreamingUtils.getStreamingInput(streamingConfig));
-        this.streamingOutput = Preconditions.checkNotNull(StreamingUtils.getStreamingOutput(streamingConfig));
-        this.streamingBatchBuilder = Preconditions.checkNotNull(StreamingUtils.getMicroBatchBuilder(streamingConfig));
+        this.realizationType = Preconditions.checkNotNull(realizationType);
+        this.realizationName = Preconditions.checkNotNull(realizationName);
+        this.streamingInput = Preconditions.checkNotNull(StreamingUtils.getStreamingInput());
+        this.streamingOutput = Preconditions.checkNotNull(StreamingUtils.getStreamingOutput());
+        this.streamingBatchBuilder = Preconditions.checkNotNull(StreamingUtils.getMicroBatchBuilder(realizationType, realizationName));
     }
 
     public Runnable build() {
         return new Runnable() {
             @Override
             public void run() {
-                StreamingBatch streamingBatch = streamingInput.getBatchWithTimeWindow(streamingConfig, -1, startTime, endTime);
+                StreamingBatch streamingBatch = streamingInput.getBatchWithTimeWindow(realizationType, realizationName, -1, startTime, endTime);
                 final IBuildable buildable = streamingBatchBuilder.createBuildable(streamingBatch);
                 final Map<Long, HyperLogLogPlusCounter> samplingResult = streamingBatchBuilder.sampling(streamingBatch);
                 final Map<TblColRef, Dictionary<String>> dictionaryMap = streamingBatchBuilder.buildDictionary(streamingBatch, buildable);

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
index ee9aed8..a6e69db 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingConfig.java
@@ -40,6 +40,7 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -56,37 +57,33 @@ public class StreamingConfig extends RootPersistentEntity {
 
     public static Serializer<StreamingConfig> SERIALIZER = new JsonSerializer<StreamingConfig>(StreamingConfig.class);
 
+    public static final String STREAMING_TYPE_KAFKA = "kafka";
+
     @JsonProperty("name")
     private String name;
 
-    @JsonProperty("iiName")
-    private String iiName;
-
-    @JsonProperty("cubeName")
-    private String cubeName;
-
-    @JsonProperty("partitions")
-    private List<String> partitions;
+    @JsonProperty("type")
+    private String type = STREAMING_TYPE_KAFKA;
 
     @JsonProperty("max_gap")
     private long maxGap = 30 * 60 * 1000l; // 30 minutes
+
     @JsonProperty("max_gap_number")
     private int maxGapNumber = 10; // 10
-    
-    public String getCubeName() {
-        return cubeName;
-    }
 
-    public void setCubeName(String cubeName) {
-        this.cubeName = cubeName;
+    @JsonProperty("partitions")
+    private Map<String, List<String>> partitions; // realization partition info, key is realization name
+    
+    public String getType() {
+        return type;
     }
 
-    public String getIiName() {
-        return iiName;
+    public void setType(String type) {
+        this.type = type;
     }
 
-    public void setIiName(String iiName) {
-        this.iiName = iiName;
+    public String getResourcePath() {
+        return concatResourcePath(name);
     }
 
     public String getName() {
@@ -97,20 +94,8 @@ public class StreamingConfig extends RootPersistentEntity {
         this.name = name;
     }
 
-    public String getResourcePath() {
-        return concatResourcePath(name);
-    }
-
-    public static String concatResourcePath(String streamingName) {
-        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + streamingName + ".json";
-    }
-
-    public List<String> getPartitions() {
-        return partitions;
-    }
-
-    public void setPartitions(List<String> partitions) {
-        this.partitions = partitions;
+    public static String concatResourcePath(String name) {
+        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + name + ".json";
     }
 
     public long getMaxGap() {
@@ -129,6 +114,14 @@ public class StreamingConfig extends RootPersistentEntity {
         this.maxGapNumber = maxGapNumber;
     }
 
+    public Map<String, List<String>> getPartitions() {
+        return partitions;
+    }
+
+    public void setPartitions(Map<String, List<String>> partitions) {
+        this.partitions = partitions;
+    }
+
     @Override
     public StreamingConfig clone() {
         try {

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
index 5c1c11e..6020e7d 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/StreamingManager.java
@@ -46,6 +46,7 @@ import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.Serializer;
 import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.common.restclient.CaseInsensitiveStringCache;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -104,16 +105,27 @@ public class StreamingManager {
         }
     }
 
+    private static String formatStreamingConfigPath(String name) {
+        return ResourceStore.STREAMING_RESOURCE_ROOT + "/" + name + ".json";
+    }
+
+    private static String formatStreamingOutputPath(String streaming, int partition) {
+        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + partition + ".json";
+    }
+
+    private static String formatStreamingOutputPath(String streaming, List<Integer> partitions) {
+        return ResourceStore.STREAMING_OUTPUT_RESOURCE_ROOT + "/" + streaming + "_" + StringUtils.join(partitions, "_") + ".json";
+    }
+
     public StreamingConfig getStreamingConfig(String name) {
         return streamingMap.get(name);
     }
 
-    public StreamingConfig getStreamingConfigByCube(String cubeName) {
-        String streamingConfig = cubeName + "_streaming";
-        return getStreamingConfig(streamingConfig);
+    public StreamingConfig getStreamingConfigByCubeName(String cube) {
+        String factTable = CubeManager.getInstance(this.config).getCube(cube).getFactTable();
+        return getStreamingConfig(factTable);
     }
 
-
     public List<StreamingConfig> listAllStreaming() {
         return new ArrayList<>(streamingMap.values());
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
index 88f5e18..e7660b6 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cli/StreamingCLI.java
@@ -44,6 +44,7 @@ import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -55,7 +56,7 @@ public class StreamingCLI {
 
     public static void main(String[] args) {
         try {
-            Preconditions.checkArgument(args[0].equals("streaming"));
+            Preconditions.checkArgument(args[0].equals("cube"));
             Preconditions.checkArgument(args[1].equals("start"));
 
             int i = 2;
@@ -69,8 +70,8 @@ public class StreamingCLI {
                 case "-end":
                     bootstrapConfig.setEnd(Long.parseLong(args[++i]));
                     break;
-                case "-streaming":
-                    bootstrapConfig.setStreaming(args[++i]);
+                case "-cube":
+                    bootstrapConfig.setCubeName(args[++i]);
                     break;
                 case "-fillGap":
                     bootstrapConfig.setFillGap(Boolean.parseBoolean(args[++i]));
@@ -81,14 +82,14 @@ public class StreamingCLI {
                 i++;
             }
             if (bootstrapConfig.isFillGap()) {
-                final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(bootstrapConfig.getStreaming());
-                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
+                StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfigByCubeName(bootstrapConfig.getCubeName());
+                final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(bootstrapConfig.getCubeName(), streamingConfig.getMaxGap());
                 logger.info("all gaps:" + StringUtils.join(gaps, ","));
                 for (Pair<Long, Long> gap : gaps) {
-                    startOneOffCubeStreaming(bootstrapConfig.getStreaming(), gap.getFirst(), gap.getSecond());
+                    startOneOffCubeStreaming(bootstrapConfig.getCubeName(), gap.getFirst(), gap.getSecond());
                 }
             } else {
-                startOneOffCubeStreaming(bootstrapConfig.getStreaming(), bootstrapConfig.getStart(), bootstrapConfig.getEnd());
+                startOneOffCubeStreaming(bootstrapConfig.getCubeName(), bootstrapConfig.getStart(), bootstrapConfig.getEnd());
                 logger.info("streaming process finished, exit with 0");
                 System.exit(0);
             }
@@ -99,8 +100,8 @@ public class StreamingCLI {
         }
     }
     
-    private static void startOneOffCubeStreaming(String streaming, long start, long end) {
-        final Runnable runnable = new OneOffStreamingBuilder(streaming, start, end).build();
+    private static void startOneOffCubeStreaming(String cubeName, long start, long end) {
+        final Runnable runnable = new OneOffStreamingBuilder(RealizationType.CUBE, cubeName, start, end).build();
         runnable.run();
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
index 0ae7143..66a0af2 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/util/StreamingUtils.java
@@ -43,29 +43,27 @@ import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.cube.StreamingCubeBuilder;
 
 import com.google.common.base.Preconditions;
+import org.apache.kylin.metadata.realization.RealizationType;
 
 /**
  * TODO: like MRUtil, use Factory pattern to allow config
  */
 public class StreamingUtils {
 
-    public static IStreamingInput getStreamingInput(String streaming) {
+    public static IStreamingInput getStreamingInput() {
         return (IStreamingInput) ClassUtil.newInstance("org.apache.kylin.source.kafka.KafkaStreamingInput");
     }
 
-    public static IStreamingOutput getStreamingOutput(String streaming) {
+    public static IStreamingOutput getStreamingOutput() {
         return (IStreamingOutput) ClassUtil.newInstance("org.apache.kylin.storage.hbase.steps.HBaseStreamingOutput");
     }
 
-    public static StreamingBatchBuilder getMicroBatchBuilder(String streaming) {
-        final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streaming);
-        Preconditions.checkNotNull(streamingConfig);
-        if (streamingConfig.getCubeName() != null) {
-            return new StreamingCubeBuilder(streamingConfig.getCubeName());
-        } else if (streamingConfig.getIiName() != null) {
-            throw new UnsupportedOperationException("not implemented yet");
+    public static StreamingBatchBuilder getMicroBatchBuilder(RealizationType realizationType, String realizationName) {
+        Preconditions.checkNotNull(realizationName);
+        if (realizationType == RealizationType.CUBE) {
+            return new StreamingCubeBuilder(realizationName);
         } else {
-            throw new UnsupportedOperationException("StreamingConfig is not valid");
+            throw new UnsupportedOperationException("not implemented yet");
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/examples/test_case_data/localmeta/kafka/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/default.streaming_table.json b/examples/test_case_data/localmeta/kafka/default.streaming_table.json
new file mode 100644
index 0000000..c99b8e5
--- /dev/null
+++ b/examples/test_case_data/localmeta/kafka/default.streaming_table.json
@@ -0,0 +1,21 @@
+{
+  "version":"2.1",
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "default.streaming_table",
+  "topic": "test_streaming_table_topic_xyz",
+  "timeout": 60000,
+  "bufferSize": 65536,
+  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
+  "last_modified": 0,
+  "clusters": [
+    {
+      "brokers": [
+        {
+          "id": 0,
+          "host": "sandbox",
+          "port": 6667
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json b/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
deleted file mode 100644
index 554fa62..0000000
--- a/examples/test_case_data/localmeta/kafka/test_streaming_table_cube.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
-  "version":"2.1",
-  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
-  "name": "test_streaming_table_cube",
-  "topic": "test_streaming_table_topic_xyz",
-  "timeout": 60000,
-  "bufferSize": 65536,
-  "parserName": "org.apache.kylin.source.kafka.TimedJsonStreamParser",
-  "partition": 1,
-  "last_modified": 0,
-  "clusters": [
-    {
-      "brokers": [
-        {
-          "id": 0,
-          "host": "sandbox",
-          "port": 6667
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/examples/test_case_data/localmeta/streaming/default.streaming_table.json
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/streaming/default.streaming_table.json b/examples/test_case_data/localmeta/streaming/default.streaming_table.json
new file mode 100644
index 0000000..6eb4a88
--- /dev/null
+++ b/examples/test_case_data/localmeta/streaming/default.streaming_table.json
@@ -0,0 +1,6 @@
+{
+  "uuid": "8b2b9dfe-777c-4d39-bf89-8472ec909193",
+  "name": "default.streaming_table",
+  "type": "kafka",
+  "last_modified": 0
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 27226e7..eeff999 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -27,10 +27,13 @@ import org.apache.kylin.common.util.AbstractKylinTestCase;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.job.DeployUtil;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
@@ -42,7 +45,7 @@ import org.slf4j.LoggerFactory;
 public class BuildCubeWithStream {
 
     private static final Logger logger = LoggerFactory.getLogger(BuildCubeWithStream.class);
-    private static final String streamingName = "test_streaming_table_cube";
+    private static final String cubeName = "test_streaming_table_cube";
     private static final long startTime = DateFormat.stringToMillis("2015-01-01 00:00:00");
     private static final long endTime = DateFormat.stringToMillis("2015-01-03 00:00:00");
     private static final long batchInterval = 16 * 60 * 60 * 1000;//16 hours
@@ -75,15 +78,16 @@ public class BuildCubeWithStream {
         DeployUtil.overrideJobJarLocations();
 
         kylinConfig = KylinConfig.getInstanceFromEnv();
-
-        final StreamingConfig config = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(streamingName);
+        final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
+        final String factTable = cubeInstance.getFactTable();
+        final StreamingConfig config = StreamingManager.getInstance(kylinConfig).getStreamingConfig(factTable);
 
         //Use a random topic for kafka data stream
-        KafkaConfig streamingConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(streamingName);
+        KafkaConfig streamingConfig = KafkaConfigManager.getInstance(kylinConfig).getKafkaConfig(config.getName());
         streamingConfig.setTopic(UUID.randomUUID().toString());
         KafkaConfigManager.getInstance(kylinConfig).saveKafkaConfig(streamingConfig);
 
-        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, config.getCubeName(), streamingConfig);
+        DeployUtil.prepareTestDataForStreamingCube(startTime, endTime, cubeName, streamingConfig);
     }
 
     public static void afterClass() throws Exception {
@@ -94,7 +98,7 @@ public class BuildCubeWithStream {
         logger.info("start time:" + startTime + " end time:" + endTime + " batch interval:" + batchInterval + " batch count:" + ((endTime - startTime) / batchInterval));
         for (long start = startTime; start < endTime; start += batchInterval) {
             logger.info(String.format("build batch:{%d, %d}", start, start + batchInterval));
-            new OneOffStreamingBuilder(streamingName, start, start + batchInterval).build().run();
+            new OneOffStreamingBuilder(RealizationType.CUBE, cubeName, start, start + batchInterval).build().run();
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 4ab640f..964e8d6 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -289,92 +289,6 @@ public class CubeController extends BasicController {
             throw new InternalErrorException("Failed to clone cube ", e);
         }
 
-        boolean isStreamingCube = false, cloneStreamingConfigSuccess = false, cloneKafkaConfigSuccess = false;
-
-
-        List<StreamingConfig> streamingConfigs = null;
-        try {
-            streamingConfigs = streamingService.listAllStreamingConfigs(cubeName);
-            if (streamingConfigs.size() != 0) {
-                isStreamingCube = true;
-            }
-
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        StreamingConfig newStreamingConfig = null;
-        KafkaConfig newKafkaConfig = null;
-
-        try {
-
-            if (isStreamingCube) {
-
-                isStreamingCube = true;
-                newStreamingConfig = streamingConfigs.get(0).clone();
-                newStreamingConfig.setName(newCubeName + "_STREAMING");
-                newStreamingConfig.updateRandomUuid();
-                newStreamingConfig.setLastModified(0);
-                newStreamingConfig.setCubeName(newCubeName);
-                try {
-                    streamingService.createStreamingConfig(newStreamingConfig);
-                    cloneStreamingConfigSuccess = true;
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to clone streaming config. ", e);
-                }
-
-                //StreamingConfig name and KafkaConfig name is the same for same cube
-                String kafkaConfigName = streamingConfigs.get(0).getName();
-                KafkaConfig kafkaConfig = null;
-                try {
-                    kafkaConfig = kafkaConfigService.getKafkaConfig(kafkaConfigName);
-                    if (kafkaConfig != null) {
-                        newKafkaConfig = kafkaConfig.clone();
-                        newKafkaConfig.setName(newStreamingConfig.getName());
-                        newKafkaConfig.setLastModified(0);
-                        newKafkaConfig.updateRandomUuid();
-                    }
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to get kafka config info. ", e);
-                }
-
-                try {
-                    kafkaConfigService.createKafkaConfig(newKafkaConfig);
-                    cloneKafkaConfigSuccess = true;
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to clone streaming config. ", e);
-                }
-            }
-        } finally {
-
-            //rollback if failed
-            if (isStreamingCube) {
-                if (cloneStreamingConfigSuccess == false || cloneKafkaConfigSuccess == false) {
-                    try {
-                        cubeService.deleteCube(newCube);
-                    } catch (Exception ex) {
-                        throw new InternalErrorException("Failed, and failed to rollback on delete cube. " + " Caused by: " + ex.getMessage(), ex);
-                    }
-                    if (cloneStreamingConfigSuccess == true) {
-                        try {
-                            streamingService.dropStreamingConfig(newStreamingConfig);
-                        } catch (IOException e) {
-                            throw new InternalErrorException("Failed to clone cube, and StreamingConfig created and failed to delete: " + e.getLocalizedMessage());
-                        }
-                    }
-                    if (cloneKafkaConfigSuccess == true) {
-                        try {
-                            kafkaConfigService.dropKafkaConfig(newKafkaConfig);
-                        } catch (IOException e) {
-                            throw new InternalErrorException("Failed to clone cube, and KafkaConfig created and failed to delete: " + e.getLocalizedMessage());
-                        }
-                    }
-
-                }
-
-            }
-        }
-
         return newCube;
 
     }
@@ -405,27 +319,6 @@ public class CubeController extends BasicController {
             throw new NotFoundException("Cube with name " + cubeName + " not found..");
         }
 
-        //drop related StreamingConfig KafkaConfig if exist
-        try {
-            List<StreamingConfig> configs = streamingService.listAllStreamingConfigs(cubeName);
-            for (StreamingConfig config : configs) {
-                try {
-                    streamingService.dropStreamingConfig(config);
-                } catch (IOException e) {
-                    logger.error(e.getLocalizedMessage(), e);
-                    throw new InternalErrorException("Failed to delete StreamingConfig. " + " Caused by: " + e.getMessage(), e);
-                }
-                try {
-                    KafkaConfig kfkConfig = kafkaConfigService.getKafkaConfig(config.getName());
-                    kafkaConfigService.dropKafkaConfig(kfkConfig);
-                } catch (IOException e) {
-                    throw new InternalErrorException("Failed to delete KafkaConfig. " + " Caused by: " + e.getMessage(), e);
-                }
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
         //drop Cube
         try {
             cubeService.deleteCube(cube);
@@ -592,133 +485,6 @@ public class CubeController extends BasicController {
             return cubeRequest;
         }
 
-        boolean updateStreamingConfigSuccess = false, updateKafkaConfigSuccess = false;
-
-        boolean isStreamingCube = cubeRequest.getStreamingCube() != null && cubeRequest.getStreamingCube().equals("true");
-
-        //oldConfig is for recover use
-        StreamingConfig streamingConfig = null, oldStreamingConfig = null;
-        KafkaConfig kafkaConfig = null, oldKafkaConfig = null;
-        if (isStreamingCube) {
-            streamingConfig = deserializeStreamingDesc(cubeRequest);
-            kafkaConfig = deserializeKafkaDesc(cubeRequest);
-            try {
-                oldKafkaConfig = kafkaConfigService.getKafkaConfig(kafkaConfig.getName());
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-            oldStreamingConfig = streamingService.getStreamingManager().getStreamingConfig(streamingConfig.getName());
-        }
-        try {
-            //streaming Cube
-            if (isStreamingCube) {
-                if (streamingConfig == null) {
-                    cubeRequest.setMessage("No StreamingConfig info to update.");
-                    return cubeRequest;
-                }
-                if (kafkaConfig == null) {
-                    cubeRequest.setMessage("No KafkaConfig info to update.");
-                    return cubeRequest;
-                }
-
-                if (oldStreamingConfig == null) {
-                    streamingConfig.setUuid(UUID.randomUUID().toString());
-                    try {
-                        streamingService.createStreamingConfig(streamingConfig);
-                        updateStreamingConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to add StreamingConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to add StreamingConfig: " + e.getLocalizedMessage());
-                    }
-                } else {
-                    try {
-                        streamingConfig = streamingService.updateStreamingConfig(streamingConfig);
-                        updateStreamingConfigSuccess = true;
-
-                    } catch (IOException e) {
-                        logger.error("Failed to update StreamingConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to update StreamingConfig: " + e.getLocalizedMessage());
-                    }
-                }
-                if (oldKafkaConfig == null) {
-                    kafkaConfig.setUuid(UUID.randomUUID().toString());
-                    try {
-                        kafkaConfigService.createKafkaConfig(kafkaConfig);
-                        updateKafkaConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to add KafkaConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to add KafkaConfig: " + e.getLocalizedMessage());
-                    }
-
-                } else {
-                    try {
-                        kafkaConfig = kafkaConfigService.updateKafkaConfig(kafkaConfig);
-                        updateKafkaConfigSuccess = true;
-                    } catch (IOException e) {
-                        logger.error("Failed to update KafkaConfig:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to update KafkaConfig: " + e.getLocalizedMessage());
-                    }
-                }
-
-            }
-        } finally {
-            if (isStreamingCube) {
-                //recover cube desc
-                if (updateStreamingConfigSuccess == false || updateKafkaConfigSuccess == false) {
-                    oldCubeDesc.setLastModified(desc.getLastModified());
-                    CubeInstance cube = cubeService.getCubeManager().getCube(cubeRequest.getCubeName());
-                    try {
-                        desc = cubeService.updateCubeAndDesc(cube, oldCubeDesc, projectName);
-                    } catch (Exception e) {
-                        logger.error("Failed to recover CubeDesc:" + e.getLocalizedMessage(), e);
-                        throw new InternalErrorException("Failed to recover CubeDesc: " + e.getLocalizedMessage());
-                    }
-
-                    if (updateStreamingConfigSuccess == true) {
-
-                        if (oldStreamingConfig != null) {
-
-                            oldStreamingConfig.setLastModified(streamingConfig.getLastModified());
-                            try {
-                                streamingService.updateStreamingConfig(oldStreamingConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to recover StreamingConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to recover StreamingConfig: " + e.getLocalizedMessage());
-                            }
-                        } else {
-                            try {
-                                streamingService.dropStreamingConfig(streamingConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to remove added StreamingConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to remove added StreamingConfig: " + e.getLocalizedMessage());
-                            }
-                        }
-                    }
-
-                    if (updateKafkaConfigSuccess == true) {
-                        if (oldKafkaConfig != null) {
-                            oldKafkaConfig.setLastModified(kafkaConfig.getLastModified());
-                            try {
-                                kafkaConfigService.updateKafkaConfig(oldKafkaConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to recover KafkaConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to recover KafkaConfig: " + e.getLocalizedMessage());
-                            }
-                        } else {
-                            try {
-                                kafkaConfigService.dropKafkaConfig(kafkaConfig);
-                            } catch (IOException e) {
-                                logger.error("Failed to remove added KafkaConfig:" + e.getLocalizedMessage(), e);
-                                throw new InternalErrorException("Failed to remove added KafkaConfig: " + e.getLocalizedMessage());
-                            }
-                        }
-                    }
-
-                }
-            }
-
-        }
-
         String descData = JsonUtil.writeValueAsIndentString(desc);
         cubeRequest.setCubeDescData(descData);
         cubeRequest.setSuccessful(true);

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
index 74b0dae..c61a551 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/StreamingController.java
@@ -70,9 +70,9 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "/getConfig", method = { RequestMethod.GET })
     @ResponseBody
-    public List<StreamingConfig> getStreamings(@RequestParam(value = "cubeName", required = false) String cubeName, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
+    public List<StreamingConfig> getStreamings(@RequestParam(value = "table", required = false) String table, @RequestParam(value = "limit", required = false) Integer limit, @RequestParam(value = "offset", required = false) Integer offset) {
         try {
-            return streamingService.getStreamingConfigs(cubeName, limit, offset);
+            return streamingService.getStreamingConfigs(table, limit, offset);
         } catch (IOException e) {
             logger.error("Failed to deal with the request:" + e.getLocalizedMessage(), e);
             throw new InternalErrorException("Failed to deal with the request: " + e.getLocalizedMessage());
@@ -166,11 +166,11 @@ public class StreamingController extends BasicController {
 
     @RequestMapping(value = "/{configName}", method = { RequestMethod.DELETE })
     @ResponseBody
-    public void deleteConfig(@PathVariable String configName) throws IOException {
-        StreamingConfig config = streamingService.getStreamingManager().getStreamingConfig(configName);
-        KafkaConfig kafkaConfig = kafkaConfigService.getKafkaConfig(configName);
+    public void deleteConfig(@PathVariable String table) throws IOException {
+        StreamingConfig config = streamingService.getStreamingManager().getStreamingConfig(table);
+        KafkaConfig kafkaConfig = kafkaConfigService.getKafkaConfig(table);
         if (null == config) {
-            throw new NotFoundException("StreamingConfig with name " + configName + " not found..");
+            throw new NotFoundException("StreamingConfig with name " + table + " not found..");
         }
         try {
             streamingService.dropStreamingConfig(config);
@@ -232,7 +232,7 @@ public class StreamingController extends BasicController {
     @RequestMapping(value = "/{cubeName}/build", method = { RequestMethod.PUT })
     @ResponseBody
     public StreamingBuildRequest buildStream(@PathVariable String cubeName, @RequestBody StreamingBuildRequest streamingBuildRequest) {
-        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCubeName(cubeName);
         Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
         List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
         Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
@@ -250,13 +250,13 @@ public class StreamingController extends BasicController {
             }
         }
 
-        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        streamingBuildRequest.setCubeName(cubeName);
         try {
             streamingService.buildStream(cube, streamingBuildRequest);
         } catch (IOException e) {  
             logger.error("", e);
             streamingBuildRequest.setSuccessful(false);
-            streamingBuildRequest.setMessage("Failed to submit job for " + streamingBuildRequest.getStreaming() + ", error is: " + e.getMessage());
+            streamingBuildRequest.setMessage("Failed to submit job for " + streamingBuildRequest.getCubeName() + ", error is: " + e.getMessage());
             return streamingBuildRequest;
         }
         streamingBuildRequest.setMessage("Build request is submitted successfully.");
@@ -275,14 +275,14 @@ public class StreamingController extends BasicController {
     @RequestMapping(value = "/{cubeName}/fillgap", method = { RequestMethod.PUT })
     @ResponseBody
     public StreamingBuildRequest fillGap(@PathVariable String cubeName) {
-        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCubeName(cubeName);
         Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
         List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
         Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
         CubeInstance cube = cubes.get(0);
 
         StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest();
-        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        streamingBuildRequest.setCubeName(cubeName);
         List<Pair<Long, Long>> gaps = null;
         try {
             gaps = streamingService.fillGap(cube);
@@ -306,17 +306,14 @@ public class StreamingController extends BasicController {
     @RequestMapping(value = "/{cubeName}/checkgap", method = { RequestMethod.PUT })
     @ResponseBody
     public StreamingBuildRequest checkGap(@PathVariable String cubeName) {
-        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCube(cubeName);
+        StreamingConfig streamingConfig = streamingService.getStreamingManager().getStreamingConfigByCubeName(cubeName);
         Preconditions.checkNotNull(streamingConfig, "Stream config for '" + cubeName + "' is not found.");
-        List<CubeInstance> cubes = cubeService.getCubes(cubeName, null, null, null, null);
-        Preconditions.checkArgument(cubes.size() == 1, "Cube '" + cubeName + "' is not found.");
-        CubeInstance cube = cubes.get(0);
 
-        List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
+        List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(cubeName, streamingConfig.getMaxGap());
         logger.info("all gaps:" + StringUtils.join(gaps, ","));
         
         StreamingBuildRequest streamingBuildRequest = new StreamingBuildRequest();
-        streamingBuildRequest.setStreaming(streamingConfig.getName());
+        streamingBuildRequest.setCubeName(cubeName);
         if (gaps.size() > 0) {
             streamingBuildRequest.setMessage(gaps.size() + " gaps in cube: " + StringUtils.join(gaps, ","));
         } else {
@@ -327,8 +324,6 @@ public class StreamingController extends BasicController {
 
     }
 
-    
-
     public void setStreamingService(StreamingService streamingService) {
         this.streamingService = streamingService;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 680e371..c110cb8 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -137,10 +137,14 @@ public class HelixClusterAdmin {
 
         IdealState idealState = admin.getResourceIdealState(clusterName, resourceName);
 
-        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
-        List<String> partitions = streamingConfig.getPartitions();
+        String cubeName = streamingBuildRequest.getCubeName();
+        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfigByCubeName(cubeName);
+        if (streamingConfig.getPartitions() == null) {
+            streamingConfig.setPartitions(Maps.<String, List<String>>newConcurrentMap());
+        }
+        List<String> partitions = streamingConfig.getPartitions().get(cubeName);
         if (partitions == null) {
-            partitions = Lists.newArrayList();
+            partitions = Lists.<String>newArrayList();
         }
 
         if (partitions.size() != idealState.getNumPartitions() || idealState.getNumPartitions() >= kylinConfig.getClusterMaxPartitionPerRegion()) {
@@ -152,14 +156,17 @@ public class HelixClusterAdmin {
             logger.info("Drop and create resource: " + resourceName);
             cleanResourcePartitions(resourceName);
             idealState = admin.getResourceIdealState(clusterName, resourceName);
-            streamingConfig.getPartitions().clear();
+            streamingConfig.getPartitions().get(cubeName).clear();
             StreamingManager.getInstance(kylinConfig).updateStreamingConfig(streamingConfig);
-            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
-            partitions = Lists.newArrayList();
+            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfigByCubeName(cubeName);
+            partitions = streamingConfig.getPartitions().get(cubeName);
+            if (partitions == null) {
+                partitions = Lists.<String>newArrayList();
+            }
         }
 
         partitions.add(streamingBuildRequest.toPartitionName());
-        streamingConfig.setPartitions(partitions);
+        streamingConfig.getPartitions().put(cubeName, partitions);
         StreamingManager.getInstance(kylinConfig).updateStreamingConfig(streamingConfig);
 
         idealState.setNumPartitions(idealState.getNumPartitions() + 1);

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
index 4652d0d..2d6b522 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/StreamCubeBuildTransitionHandler.java
@@ -49,7 +49,7 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
         if (streamingBuildRequest != null && isSuccessfullyBuilt(streamingBuildRequest) == false) {
             KylinConfigBase.getKylinHome();
             String segmentId = streamingBuildRequest.toPartitionName();
-            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingBuildRequest.getStreaming() + " " + segmentId + " -oneoff true -start " + streamingBuildRequest.getStart() + " -end " + streamingBuildRequest.getEnd() + " -streaming " + streamingBuildRequest.getStreaming();
+            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming start " + streamingBuildRequest.getCubeName() + " " + segmentId + " -oneoff true -start " + streamingBuildRequest.getStart() + " -end " + streamingBuildRequest.getEnd() + " -cube " + streamingBuildRequest.getCubeName();
             runCMD(cmd);
         }
     }
@@ -63,15 +63,14 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
         if (isSuccessfullyBuilt(streamingBuildRequest) == false) {
             KylinConfigBase.getKylinHome();
             String segmentId = streamingBuildRequest.toPartitionName();
-            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming stop " + streamingBuildRequest.getStreaming() + " " + segmentId;
+            String cmd = KylinConfigBase.getKylinHome() + "/bin/kylin.sh streaming stop " + streamingBuildRequest.getCubeName() + " " + segmentId;
             runCMD(cmd);
         }
         */
     }
 
     private boolean isSuccessfullyBuilt(StreamingBuildRequest streamingBuildRequest) {
-        final StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamingBuildRequest.getStreaming());
-        final String cubeName = streamingConfig.getCubeName();
+        final String cubeName = streamingBuildRequest.getCubeName();
         final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
         for (CubeSegment segment : cube.getSegments()) {
             if (segment.getDateRangeStart() <= streamingBuildRequest.getStart() && segment.getDateRangeEnd() >= streamingBuildRequest.getEnd()) {
@@ -84,35 +83,35 @@ public class StreamCubeBuildTransitionHandler extends TransitionHandler {
     }
 
     private StreamingBuildRequest getStreamingBuildRequest(String resourceName, String partitionName) {
-        String streamConfigName = resourceName.substring(HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX.length());
+        String cubeName = resourceName.substring(HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX.length());
         int partitionId = Integer.parseInt(partitionName.substring(partitionName.lastIndexOf("_") + 1));
 
-        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamConfigName);
+        StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfigByCubeName(cubeName);
 
         int retry = 0;
-        while ((streamingConfig.getPartitions() == null || streamingConfig.getPartitions().isEmpty() || partitionId > (streamingConfig.getPartitions().size() - 1) && retry < 10)) {
-            logger.error("No segment information in StreamingConfig '" + streamConfigName + "' for partition " + partitionId);
+        while ((streamingConfig.getPartitions() == null || streamingConfig.getPartitions().get(cubeName) == null || streamingConfig.getPartitions().get(cubeName).isEmpty() || partitionId > (streamingConfig.getPartitions().get(cubeName).size() - 1) && retry < 10)) {
+            logger.error("No segment information in StreamingConfig '" + cubeName + "' for partition " + partitionId);
             logger.error("Wait for 0.5 second...");
             try {
                 Thread.sleep(500);
             } catch (InterruptedException e) {
                 logger.error("", e);
             }
-            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(streamConfigName);
+            streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfigByCubeName(cubeName);
             retry++;
         }
 
         if (retry >= 10) {
-            logger.error("No segment information in StreamingConfig '" + streamConfigName + "' for partition " + partitionId);
-            logger.warn("Abor building...");
+            logger.error("No segment information in StreamingConfig '" + cubeName + "' for partition " + partitionId);
+            logger.warn("Abort building...");
             return null;
         }
 
-        String startEnd = streamingConfig.getPartitions().get(partitionId);
+        String startEnd = streamingConfig.getPartitions().get(cubeName).get(partitionId);
         long start = Long.parseLong(startEnd.substring(0, startEnd.indexOf("_")));
         long end = Long.parseLong(startEnd.substring(startEnd.indexOf("_") + 1));
         StreamingBuildRequest request = new StreamingBuildRequest();
-        request.setStreaming(streamConfigName);
+        request.setCubeName(cubeName);
         request.setStart(start);
         request.setEnd(end);
         return request;

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
index 201568e..b37d2e1 100644
--- a/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
+++ b/server/src/main/java/org/apache/kylin/rest/request/StreamingBuildRequest.java
@@ -25,7 +25,7 @@ import static org.apache.kylin.rest.helix.HelixClusterAdmin.RESOURCE_STREAME_CUB
 
 public class StreamingBuildRequest {
 
-    private String streaming;
+    private String cubeName;
     private long start;
     private long end;
     private String message;
@@ -34,18 +34,18 @@ public class StreamingBuildRequest {
     public StreamingBuildRequest() {
     }
 
-    public StreamingBuildRequest(String streaming, long start, long end) {
-        this.streaming = streaming;
+    public StreamingBuildRequest(String cubeName, long start, long end) {
+        this.cubeName = cubeName;
         this.start = start;
         this.end = end;
     }
 
-    public String getStreaming() {
-        return streaming;
+    public String getCubeName() {
+        return cubeName;
     }
 
-    public void setStreaming(String streaming) {
-        this.streaming = streaming;
+    public void setCubeName(String cubeName) {
+        this.cubeName = cubeName;
     }
 
     public boolean isSuccessful() {
@@ -81,7 +81,7 @@ public class StreamingBuildRequest {
     }
 
     public String toResourceName() {
-        return HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX + streaming;
+        return HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX + cubeName;
     }
     public String toPartitionName() {
         return start + "_" + end;

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
index 28b9472..f847d5c 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/StreamingService.java
@@ -23,6 +23,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.streaming.StreamingConfig;
 import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
@@ -49,26 +50,22 @@ public class StreamingService extends BasicService {
     private AccessService accessService;
 
     @PostFilter(Constant.ACCESS_POST_FILTER_READ)
-    public List<StreamingConfig> listAllStreamingConfigs(final String cubeName) throws IOException {
+    public List<StreamingConfig> listAllStreamingConfigs(final String table) throws IOException {
         List<StreamingConfig> streamingConfigs = new ArrayList();
-        CubeInstance cubeInstance = (null != cubeName) ? getCubeManager().getCube(cubeName) : null;
-        if (null == cubeInstance) {
+        if (StringUtils.isEmpty(table)) {
             streamingConfigs = getStreamingManager().listAllStreaming();
         } else {
-            for (StreamingConfig config : getStreamingManager().listAllStreaming()) {
-                if (cubeInstance.getName().equals(config.getCubeName())) {
-                    streamingConfigs.add(config);
-                }
-            }
+            StreamingConfig config = getStreamingManager().getStreamingConfig(table);
+            streamingConfigs.add(config);
         }
 
         return streamingConfigs;
     }
 
-    public List<StreamingConfig> getStreamingConfigs(final String cubeName, final Integer limit, final Integer offset) throws IOException {
+    public List<StreamingConfig> getStreamingConfigs(final String table, final Integer limit, final Integer offset) throws IOException {
 
         List<StreamingConfig> streamingConfigs;
-        streamingConfigs = listAllStreamingConfigs(cubeName);
+        streamingConfigs = listAllStreamingConfigs(table);
 
         if (limit == null || offset == null) {
             return streamingConfigs;
@@ -113,8 +110,12 @@ public class StreamingService extends BasicService {
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#cube, 'ADMINISTRATION') or hasPermission(#cube, 'OPERATION') or hasPermission(#cube, 'MANAGEMENT')")
     public List<Pair<Long, Long>> fillGap(CubeInstance cube) throws IOException {
         HelixClusterAdmin clusterAdmin = HelixClusterAdmin.getInstance(KylinConfig.getInstanceFromEnv());
-        final StreamingConfig streamingConfig = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfigByCube(cube.getName());
-        final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(streamingConfig.getCubeName(), streamingConfig.getMaxGap());
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        final StreamingConfig streamingConfig = StreamingManager.getInstance(kylinConfig).getStreamingConfig(cube.getFactTable());
+        if (streamingConfig == null) {
+            throw new IllegalArgumentException("Cube '" + cube.getName() + "' is not a streaming cube.");
+        }
+        final List<Pair<Long, Long>> gaps = StreamingMonitor.findGaps(cube.getName(), streamingConfig.getMaxGap());
         logger.info("all gaps:" + StringUtils.join(gaps, ","));
 
         List<Pair<Long, Long>> filledGap = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
index 2e262b3..3064fde 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaStreamingInput.java
@@ -47,9 +47,14 @@ import kafka.message.MessageAndOffset;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.streaming.IStreamingInput;
 import org.apache.kylin.common.util.StreamingBatch;
 import org.apache.kylin.common.util.StreamingMessage;
+import org.apache.kylin.engine.streaming.StreamingConfig;
+import org.apache.kylin.engine.streaming.StreamingManager;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.config.KafkaClusterConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.apache.kylin.source.kafka.util.KafkaRequester;
@@ -65,39 +70,54 @@ public class KafkaStreamingInput implements IStreamingInput {
     private static final Logger logger = LoggerFactory.getLogger(KafkaStreamingInput.class);
 
     @Override
-    public StreamingBatch getBatchWithTimeWindow(String streaming, int id, long startTime, long endTime) {
-        try {
+    public StreamingBatch getBatchWithTimeWindow(RealizationType realizationType, String realizationName, int id, long startTime, long endTime) {
+        if (realizationType != RealizationType.CUBE) {
+            throw new IllegalArgumentException("Unsupported realization in KafkaStreamingInput: " + realizationType);
+        }
+        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        final CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(realizationName);
+        final String streaming = cube.getFactTable();
+        final StreamingManager streamingManager = StreamingManager.getInstance(kylinConfig);
+        final StreamingConfig streamingConfig = streamingManager.getStreamingConfig(streaming);
+        if (streamingConfig == null) {
+            throw new IllegalArgumentException("Table " + streaming + " is not a streaming table.");
+        }
+        if (StreamingConfig.STREAMING_TYPE_KAFKA.equals(streamingConfig.getType())) {
             logger.info(String.format("prepare to get streaming batch, name:%s, id:%d, startTime:%d, endTime:%d", streaming, id, startTime, endTime));
-            final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            final KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(kylinConfig);
-            final KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(streaming);
-            final StreamingParser streamingParser = StreamingParser.getStreamingParser(kafkaConfig);
-            final ExecutorService executorService = Executors.newCachedThreadPool();
-            final List<Future<List<StreamingMessage>>> futures = Lists.newArrayList();
-            for (final KafkaClusterConfig kafkaClusterConfig : kafkaConfig.getKafkaClusterConfigs()) {
-                final int partitionCount = KafkaRequester.getKafkaTopicMeta(kafkaClusterConfig).getPartitionIds().size();
-                for (int i = 0; i < partitionCount; ++i) {
-                    final StreamingMessageProducer producer = new StreamingMessageProducer(kafkaClusterConfig, i, Pair.newPair(startTime, endTime), kafkaConfig.getMargin(), streamingParser);
-                    final Future<List<StreamingMessage>> future = executorService.submit(producer);
-                    futures.add(future);
+
+            try {
+                final KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(kylinConfig);
+                final KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(streaming);
+                final StreamingParser streamingParser = StreamingParser.getStreamingParser(kafkaConfig, realizationType, realizationName);
+                final ExecutorService executorService = Executors.newCachedThreadPool();
+                final List<Future<List<StreamingMessage>>> futures = Lists.newArrayList();
+                for (final KafkaClusterConfig kafkaClusterConfig : kafkaConfig.getKafkaClusterConfigs()) {
+                    final int partitionCount = KafkaRequester.getKafkaTopicMeta(kafkaClusterConfig).getPartitionIds().size();
+                    for (int i = 0; i < partitionCount; ++i) {
+                        final StreamingMessageProducer producer = new StreamingMessageProducer(kafkaClusterConfig, i, Pair.newPair(startTime, endTime), kafkaConfig.getMargin(), streamingParser);
+                        final Future<List<StreamingMessage>> future = executorService.submit(producer);
+                        futures.add(future);
+                    }
                 }
-            }
-            List<StreamingMessage> messages = Lists.newLinkedList();
-            for (Future<List<StreamingMessage>> future : futures) {
-                try {
-                    messages.addAll(future.get());
-                } catch (InterruptedException e) {
-                    logger.warn("this thread should not be interrupted, just ignore", e);
-                    continue;
-                } catch (ExecutionException e) {
-                    throw new RuntimeException("error when get StreamingMessages",e.getCause());
+                List<StreamingMessage> messages = Lists.newLinkedList();
+                for (Future<List<StreamingMessage>> future : futures) {
+                    try {
+                        messages.addAll(future.get());
+                    } catch (InterruptedException e) {
+                        logger.warn("this thread should not be interrupted, just ignore", e);
+                        continue;
+                    } catch (ExecutionException e) {
+                        throw new RuntimeException("error when get StreamingMessages", e.getCause());
+                    }
                 }
+                final Pair<Long, Long> timeRange = Pair.newPair(startTime, endTime);
+                logger.info("finish to get streaming batch, total message count:" + messages.size());
+                return new StreamingBatch(messages, timeRange);
+            } catch (ReflectiveOperationException e) {
+                throw new RuntimeException("failed to create instance of StreamingParser", e);
             }
-            final Pair<Long, Long> timeRange = Pair.newPair(startTime, endTime);
-            logger.info("finish to get streaming batch, total message count:" + messages.size());
-            return new StreamingBatch(messages, timeRange);
-        } catch (ReflectiveOperationException e) {
-            throw new RuntimeException("failed to create instance of StreamingParser", e);
+        } else {
+            throw new IllegalArgumentException("kafka is the only supported streaming type.");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/66b84a2c/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
----------------------------------------------------------------------
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
index 3455f1d..7b326e2 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/StreamingParser.java
@@ -50,6 +50,7 @@ import org.apache.kylin.engine.streaming.StreamingManager;
 import org.apache.kylin.common.util.StreamingMessage;
 import org.apache.kylin.metadata.model.IntermediateColumnDesc;
 import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 
 import com.google.common.base.Function;
@@ -68,9 +69,8 @@ public abstract class StreamingParser {
 
     abstract public boolean filter(StreamingMessage streamingMessage);
 
-    public static StreamingParser getStreamingParser(KafkaConfig kafkaConfig) throws ReflectiveOperationException {
-        final String cubeName = StreamingManager.getInstance(KylinConfig.getInstanceFromEnv()).getStreamingConfig(kafkaConfig.getName()).getCubeName();
-        final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
+    public static StreamingParser getStreamingParser(KafkaConfig kafkaConfig, RealizationType realizationType, String realizationName) throws ReflectiveOperationException {
+        final CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(realizationName);
         List<TblColRef> columns = Lists.transform(new CubeJoinedFlatTableDesc(cubeInstance.getDescriptor(), null).getColumnList(), new Function<IntermediateColumnDesc, TblColRef>() {
             @Nullable
             @Override


[07/43] kylin git commit: minor, increase maxperm to avoid VM crash

Posted by sh...@apache.org.
minor, increase maxperm to avoid VM crash


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5474fe4e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5474fe4e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5474fe4e

Branch: refs/heads/helix-rebase
Commit: 5474fe4e887db263df0da7837f3fd9b022f18a79
Parents: 4d6043f
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Mar 1 09:49:56 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 09:51:12 2016 +0800

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5474fe4e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index cb1e49e..42a0c6d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -668,7 +668,7 @@
                                     <value>false</value>
                                 </property>
                             </systemProperties>
-                            <argLine>-Xmx4G -XX:MaxPermSize=256M</argLine>
+                            <argLine>-Xmx4G -XX:MaxPermSize=512M</argLine>
                         </configuration>
                     </plugin>
 


[29/43] kylin git commit: KYLIN-1311 Stream cubing auto assignment and load balance

Posted by sh...@apache.org.
KYLIN-1311 Stream cubing auto assignment and load balance


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/55558551
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/55558551
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/55558551

Branch: refs/heads/helix-rebase
Commit: 55558551e52d71a4f97fc3f0a18d6ef5d8328abd
Parents: 96e9577
Author: shaofengshi <sh...@apache.org>
Authored: Wed Jan 13 12:00:48 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/rest/constant/Constant.java    |  1 +
 .../kylin/rest/helix/HelixClusterAdmin.java     | 22 +++++++--
 .../helix/LeaderStandbyStateModelFactory.java   | 50 +++++++++++++++++++-
 3 files changed, 68 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/55558551/server/src/main/java/org/apache/kylin/rest/constant/Constant.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/constant/Constant.java b/server/src/main/java/org/apache/kylin/rest/constant/Constant.java
index f068e5f..58b74f0 100644
--- a/server/src/main/java/org/apache/kylin/rest/constant/Constant.java
+++ b/server/src/main/java/org/apache/kylin/rest/constant/Constant.java
@@ -41,6 +41,7 @@ public class Constant {
 
     public final static String SERVER_MODE_QUERY = "query";
     public final static String SERVER_MODE_JOB = "job";
+    public final static String SERVER_MODE_STREAM = "stream";
     public final static String SERVER_MODE_ALL = "all";
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/55558551/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
index 9983aae..6300383 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/HelixClusterAdmin.java
@@ -45,10 +45,12 @@ import java.util.concurrent.ConcurrentMap;
 public class HelixClusterAdmin {
 
     public static final String RESOURCE_NAME_JOB_ENGINE = "Resource_JobEngine";
+    public static final String RESOURCE_STREAME_CUBE_PREFIX = "Resource_Streame_";
 
     public static final String MODEL_LEADER_STANDBY = "LeaderStandby";
     public static final String MODEL_ONLINE_OFFLINE = "OnlineOffline";
     public static final String TAG_JOB_ENGINE = "Tag_JobEngine";
+    public static final String TAG_STREAM_BUILDER = "Tag_StreamBuilder";
 
     private static ConcurrentMap<KylinConfig, HelixClusterAdmin> instanceMaps = Maps.newConcurrentMap();
     private HelixManager participantManager;
@@ -74,11 +76,15 @@ public class HelixClusterAdmin {
 
         // use the tag to mark node's role.
         final List<String> instanceTags = Lists.newArrayList();
-        final boolean runJobEngine = Constant.SERVER_MODE_ALL.equalsIgnoreCase(kylinConfig.getServerMode()) || Constant.SERVER_MODE_JOB.equalsIgnoreCase(kylinConfig.getServerMode());
-        if (runJobEngine) {
+        if (Constant.SERVER_MODE_ALL.equalsIgnoreCase(kylinConfig.getServerMode())) {
             instanceTags.add(HelixClusterAdmin.TAG_JOB_ENGINE);
+            instanceTags.add(HelixClusterAdmin.TAG_STREAM_BUILDER);
+        } else if (Constant.SERVER_MODE_JOB.equalsIgnoreCase(kylinConfig.getServerMode())) {
+            instanceTags.add(HelixClusterAdmin.TAG_JOB_ENGINE);
+        } else if (Constant.SERVER_MODE_STREAM.equalsIgnoreCase(kylinConfig.getServerMode())) {
+            instanceTags.add(HelixClusterAdmin.TAG_STREAM_BUILDER);
         }
-
+        
         addInstance(instanceName, instanceTags);
         startInstance(instanceName);
 
@@ -108,6 +114,16 @@ public class HelixClusterAdmin {
         }
 
     }
+    
+    public void addStreamCubeSlice(String cubeName, long start, long end) {
+        String resourceName = RESOURCE_STREAME_CUBE_PREFIX + cubeName + "_" + start + "_" + end;
+        if (!admin.getResourcesInCluster(clusterName).contains(resourceName)) {
+            admin.addResource(clusterName, resourceName, 1, MODEL_LEADER_STANDBY, IdealState.RebalanceMode.SEMI_AUTO.name());
+        }
+
+        admin.rebalance(clusterName, resourceName, 2, "", TAG_STREAM_BUILDER);
+        
+    }
 
     /**
      * Start the instance and register the state model factory

http://git-wip-us.apache.org/repos/asf/kylin/blob/55558551/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
index 6694c81..c2a78e7 100644
--- a/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
+++ b/server/src/main/java/org/apache/kylin/rest/helix/LeaderStandbyStateModelFactory.java
@@ -1,5 +1,6 @@
 package org.apache.kylin.rest.helix;
 
+import com.google.common.base.Preconditions;
 import org.apache.helix.NotificationContext;
 import org.apache.helix.api.StateTransitionHandlerFactory;
 import org.apache.helix.api.TransitionHandler;
@@ -8,12 +9,16 @@ import org.apache.helix.api.id.ResourceId;
 import org.apache.helix.model.Message;
 import org.apache.helix.participant.statemachine.Transition;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.streaming.OneOffStreamingBuilder;
+import org.apache.kylin.engine.streaming.cli.StreamingCLI;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.lock.MockJobLock;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.kylin.rest.helix.HelixClusterAdmin.RESOURCE_STREAME_CUBE_PREFIX;
+
 /**
  */
 public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactory<TransitionHandler> {
@@ -22,13 +27,19 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
     @Override
     public TransitionHandler createStateTransitionHandler(PartitionId partitionId) {
         if (partitionId.getResourceId().equals(ResourceId.from(HelixClusterAdmin.RESOURCE_NAME_JOB_ENGINE))) {
-            return new JobEngineStateModel();
+            return JobEngineStateModel.INSTANCE;
         }
-        
+
+        if (partitionId.getResourceId().stringify().startsWith(RESOURCE_STREAME_CUBE_PREFIX)) {
+            return StreamCubeStateModel.INSTANCE;
+        }
+
         return null;
     }
 
     public static class JobEngineStateModel extends TransitionHandler {
+        
+        public static JobEngineStateModel INSTANCE = new JobEngineStateModel();
 
         @Transition(to = "LEADER", from = "STANDBY")
         public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
@@ -67,4 +78,39 @@ public class LeaderStandbyStateModelFactory extends StateTransitionHandlerFactor
 
         }
     }
+
+    public static class StreamCubeStateModel extends TransitionHandler {
+        
+        public static StreamCubeStateModel INSTANCE = new StreamCubeStateModel();
+
+        @Transition(to = "LEADER", from = "STANDBY")
+        public void onBecomeLeaderFromStandby(Message message, NotificationContext context) {
+            String resourceName = message.getResourceId().stringify();
+            Preconditions.checkArgument(resourceName.startsWith(RESOURCE_STREAME_CUBE_PREFIX));
+            long end = Long.parseLong(resourceName.substring(resourceName.lastIndexOf("_")) + 1);
+            String temp = resourceName.substring(RESOURCE_STREAME_CUBE_PREFIX.length(), resourceName.lastIndexOf("_"));
+            long start = Long.parseLong(temp.substring(temp.lastIndexOf("_")) + 1);
+            String cubeName = temp.substring(0, temp.lastIndexOf("_"));
+
+            final Runnable runnable = new OneOffStreamingBuilder(cubeName, start, end).build();
+            runnable.run();
+        }
+
+        @Transition(to = "STANDBY", from = "LEADER")
+        public void onBecomeStandbyFromLeader(Message message, NotificationContext context) {
+           
+
+        }
+
+        @Transition(to = "STANDBY", from = "OFFLINE")
+        public void onBecomeStandbyFromOffline(Message message, NotificationContext context) {
+           
+        }
+
+
+        @Transition(to = "OFFLINE", from = "STANDBY")
+        public void onBecomeOfflineFromStandby(Message message, NotificationContext context) {
+           
+        }
+    }
 }


[37/43] kylin git commit: KYLIN-1415 Cube parallel merge

Posted by sh...@apache.org.
KYLIN-1415 Cube parallel merge


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/495ad927
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/495ad927
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/495ad927

Branch: refs/heads/helix-rebase
Commit: 495ad9278586d3fc4b7620779e77fcf511a7f2e9
Parents: 3bb345e
Author: shaofengshi <sh...@apache.org>
Authored: Sun Feb 14 22:11:59 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/cube/CubeManager.java | 50 ++++++++++++--------
 .../apache/kylin/rest/service/CubeService.java  |  2 +-
 2 files changed, 32 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/495ad927/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index 4951ce6..16922ac 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -18,13 +18,11 @@
 
 package org.apache.kylin.cube;
 
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-
-import javax.annotation.Nullable;
-
+import com.google.common.base.Function;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Multimap;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
@@ -46,9 +44,7 @@ import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.project.ProjectManager;
-import org.apache.kylin.metadata.project.RealizationEntry;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.metadata.realization.IRealizationProvider;
@@ -59,11 +55,11 @@ import org.apache.kylin.source.SourceFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Function;
-import com.google.common.collect.Collections2;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Multimap;
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * @author yangli9
@@ -460,8 +456,14 @@ public class CubeManager implements IRealizationProvider {
     }
 
     public CubeSegment mergeSegments(CubeInstance cube, final long startDate, final long endDate, boolean forceMergeEmptySeg) throws IOException {
-        checkNoBuildingSegment(cube);
+        return mergeSegments(cube, startDate, endDate, forceMergeEmptySeg, true);
+    }
+
+    public CubeSegment mergeSegments(CubeInstance cube, final long startDate, final long endDate, boolean forceMergeEmptySeg, boolean strictCheck) throws IOException {
         checkCubeIsPartitioned(cube);
+        
+        if (strictCheck)
+            checkNoBuildingSegment(cube);
 
         Pair<Long, Long> range = alignMergeRange(cube, startDate, endDate);
         CubeSegment newSegment = newSegment(cube, range.getFirst(), range.getSecond());
@@ -621,13 +623,23 @@ public class CubeManager implements IRealizationProvider {
             return null;
         }
 
-        if (cube.getBuildingSegments().size() > 0) {
-            logger.debug("Cube " + cube.getName() + " has bulding segment, will not trigger merge at this moment");
+        List<CubeSegment> readySegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.READY));
+
+        if (readySegments.size() == 0) {
+            logger.debug("Cube " + cube.getName() + " has no ready segment to merge");
             return null;
         }
+        List<CubeSegment> buildingSegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.NEW));
+        List<CubeSegment> toSkipSegments = Lists.newArrayList();
+        for (CubeSegment building : buildingSegments) {
+            for (CubeSegment ready : readySegments) {
+                if (ready.getDateRangeStart() >= building.getDateRangeStart() && ready.getDateRangeEnd() <= building.getDateRangeEnd()) {
+                    toSkipSegments.add(ready);
+                }
+            }
+        }
 
-        List<CubeSegment> readySegments = Lists.newArrayList(cube.getSegments(SegmentStatusEnum.READY));
-
+        readySegments.removeAll(toSkipSegments);
         if (readySegments.size() == 0) {
             logger.debug("Cube " + cube.getName() + " has no ready segment to merge");
             return null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/495ad927/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index b2a278a..abc613b 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -653,7 +653,7 @@ public class CubeService extends BasicService {
                     cube = getCubeManager().getCube(cubeName);
                     CubeSegment newSeg = getCubeManager().autoMergeCubeSegments(cube);
                     if (newSeg != null) {
-                        newSeg = getCubeManager().mergeSegments(cube, newSeg.getDateRangeStart(), newSeg.getDateRangeEnd(), true);
+                        newSeg = getCubeManager().mergeSegments(cube, newSeg.getDateRangeStart(), newSeg.getDateRangeEnd(), true, false);
                         logger.debug("Will submit merge job on " + newSeg);
                         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(newSeg, "SYSTEM");
                         getExecutableManager().addJob(job);


[41/43] kylin git commit: KYLIN-1421 fix the “Last build time” is always empty issue

Posted by sh...@apache.org.
KYLIN-1421 fix the “Last build time” is always empty issue


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7edc8b89
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7edc8b89
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7edc8b89

Branch: refs/heads/helix-rebase
Commit: 7edc8b892ae9f955fc27cd9f047049a56ab41651
Parents: 0ff0e6d
Author: shaofengshi <sh...@apache.org>
Authored: Tue Feb 16 14:07:00 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:20 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java    | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/7edc8b89/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
----------------------------------------------------------------------
diff --git a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
index d373e4a..4b11408 100644
--- a/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
+++ b/engine-streaming/src/main/java/org/apache/kylin/engine/streaming/cube/StreamingCubeBuilder.java
@@ -117,6 +117,7 @@ public class StreamingCubeBuilder implements StreamingBatchBuilder {
             CubeSegment segment = cubeManager.appendSegments(cubeInstance, streamingBatch.getTimeRange().getFirst(), streamingBatch.getTimeRange().getSecond(), false, false);
             segment.setLastBuildJobID(segment.getUuid()); // give a fake job id
             segment.setInputRecords(streamingBatch.getMessages().size());
+            segment.setLastBuildTime(System.currentTimeMillis());
             return segment;
         } catch (IOException e) {
             throw new RuntimeException("failed to create IBuildable", e);


[06/43] kylin git commit: KYLIN-1445 Check HIVE_CONF directory before startup Kylin instance

Posted by sh...@apache.org.
KYLIN-1445 Check HIVE_CONF directory before startup Kylin instance


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4d6043f1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4d6043f1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4d6043f1

Branch: refs/heads/helix-rebase
Commit: 4d6043f1c66a358f3c5e57e9cb14861401f09b28
Parents: 9c77a5e
Author: lidongsjtu <li...@apache.org>
Authored: Wed Feb 24 15:42:52 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Mon Feb 29 19:27:13 2016 +0800

----------------------------------------------------------------------
 build/bin/find-hive-dependency.sh | 24 +++++++++++++++++++++---
 1 file changed, 21 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4d6043f1/build/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/build/bin/find-hive-dependency.sh b/build/bin/find-hive-dependency.sh
index 171c5b1..87f18dd 100644
--- a/build/bin/find-hive-dependency.sh
+++ b/build/bin/find-hive-dependency.sh
@@ -32,6 +32,13 @@ hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`
 arr=(`echo $hive_classpath | cut -d ":"  --output-delimiter=" " -f 1-`)
 hive_conf_path=
 hive_exec_path=
+
+if [ -n "$HIVE_CONF" ]
+then
+    echo "HIVE_CONF is set to: $HIVE_CONF, use it to locate hive configurations."
+    hive_conf_path=$HIVE_CONF
+fi
+
 for data in ${arr[@]}
 do
     result=`echo $data | grep -e 'hive-exec[a-z0-9A-Z\.-]*jar'`
@@ -39,13 +46,24 @@ do
     then
         hive_exec_path=$data
     fi
-    result=`echo $data | grep -e 'hive[^/]*/conf'`
-    if [ $result ]
+
+    # in some versions of hive config is not in hive's classpath, find it separately
+    if [ -z "$hive_conf_path" ]
     then
-        hive_conf_path=$data
+        result=`echo $data | grep -e 'hive[^/]*/conf'`
+        if [ $result ]
+        then
+            hive_conf_path=$data
+        fi
     fi
 done
 
+if [ -z "$hive_conf_path" ]
+then
+    echo "Couldn't find hive configuration directory. Please set HIVE_CONF to the path which contains hive-site.xml."
+    exit 1
+fi
+
 # in some versions of hive hcatalog is not in hive's classpath, find it separately
 if [ -z "$HCAT_HOME" ]
 then


[09/43] kylin git commit: minor, fix CI

Posted by sh...@apache.org.
minor, fix CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/098a8532
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/098a8532
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/098a8532

Branch: refs/heads/helix-rebase
Commit: 098a8532f65562644b9b54b465ce23fbed904269
Parents: ca59795
Author: Hongbin Ma <ma...@apache.org>
Authored: Tue Mar 1 18:41:33 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Mar 1 18:41:33 2016 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/query/ITKylinQueryTest.java | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/098a8532/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index fa71db2..fd88452 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -25,12 +25,14 @@ import java.sql.DriverManager;
 import java.util.List;
 import java.util.Properties;
 
+import net.sf.ehcache.CacheManager;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.query.enumerator.OLAPQuery;
 import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.schema.OLAPSchemaFactory;
+import org.apache.kylin.storage.cache.AbstractCacheFledgedQuery;
 import org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.ObserverEnabler;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.dbunit.database.DatabaseConnection;
@@ -42,6 +44,8 @@ import org.junit.Test;
 
 @Ignore("KylinQueryTest is contained by ITCombinationTest")
 public class ITKylinQueryTest extends KylinTestBase {
+    private static CacheManager cacheManager;
+
 
     @BeforeClass
     public static void setUp() throws Exception {
@@ -74,6 +78,9 @@ public class ITKylinQueryTest extends KylinTestBase {
         // Load H2 Tables (inner join)
         H2Database h2DB = new H2Database(h2Connection, config);
         h2DB.loadAllTables();
+
+        cacheManager = CacheManager.newInstance("../server/src/main/resources/ehcache-test.xml");
+        AbstractCacheFledgedQuery.setCacheManager(cacheManager);
     }
 
     protected static void clean() {
@@ -84,6 +91,11 @@ public class ITKylinQueryTest extends KylinTestBase {
 
         ObserverEnabler.forceCoprocessorUnset();
         HBaseMetadataTestCase.staticCleanupTestMetadata();
+
+        if (cacheManager != null) {
+            cacheManager.shutdown();
+        }
+        AbstractCacheFledgedQuery.setCacheManager(null);
     }
 
     @Ignore("this is only for debug")
@@ -237,7 +249,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testTopNQuery() throws Exception {
-            if ("left".equalsIgnoreCase(joinType)) {
+        if ("left".equalsIgnoreCase(joinType)) {
             this.execAndCompQuery("src/test/resources/query/sql_topn", null, true);
         }
     }


[26/43] kylin git commit: rebase 2.x-staging

Posted by sh...@apache.org.
rebase 2.x-staging


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/3b5260ab
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/3b5260ab
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/3b5260ab

Branch: refs/heads/helix-rebase
Commit: 3b5260abc8d66b04d846cb933d927236d256bb92
Parents: 4022868
Author: shaofengshi <sh...@apache.org>
Authored: Wed Dec 30 14:22:35 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Fri Mar 4 09:52:19 2016 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfig.java    |  4 +++
 .../kylin/rest/controller/JobController.java    | 26 +++++++-------------
 .../apache/kylin/rest/service/CubeService.java  |  8 +++---
 3 files changed, 18 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/3b5260ab/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 81f5827..ea77e47 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -264,5 +264,9 @@ public class KylinConfig extends KylinConfigBase {
             out.println(key + "=" + val);
         }
     }
+    
+    public String getClusterName() {
+        return this.getOptional("kylin.cluster.name", getMetadataUrlPrefix());
+    }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/3b5260ab/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
index 4d0824a..9dfb594 100644
--- a/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
+++ b/server/src/main/java/org/apache/kylin/rest/controller/JobController.java
@@ -26,18 +26,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.TimeZone;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import joptsimple.internal.Strings;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.restclient.Broadcaster;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.constant.JobTimeFilterEnum;
-import org.apache.kylin.job.engine.JobEngineConfig;
-import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
-import org.apache.kylin.job.lock.JobLock;
-import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.helix.HelixJobEngineAdmin;
 import org.apache.kylin.rest.request.JobListRequest;
 import org.apache.kylin.rest.service.JobService;
 import org.slf4j.Logger;
@@ -50,6 +47,9 @@ import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.ResponseBody;
 
+import java.io.IOException;
+import java.util.*;
+
 /**
  * @author ysong1
  * @author Jack
@@ -63,9 +63,6 @@ public class JobController extends BasicController implements InitializingBean {
     @Autowired
     private JobService jobService;
 
-    @Autowired
-    private JobLock jobLock;
-
     /*
      * (non-Javadoc)
      * 
@@ -79,15 +76,9 @@ public class JobController extends BasicController implements InitializingBean {
         TimeZone tzone = TimeZone.getTimeZone(timeZone);
         TimeZone.setDefault(tzone);
 
-        if (System.getProperty("kylin.rest.address") == null) {
-            throw new RuntimeException("There is no -Dkylin.rest.address set; Please check bin/kylin.sh");
-        }
-
-        final String restAddress = System.getProperty("kylin.rest.address");
-        final String hostname = Preconditions.checkNotNull(restAddress.substring(0, restAddress.lastIndexOf(":")));
-        final String port = Preconditions.checkNotNull(restAddress.substring(restAddress.lastIndexOf(":") + 1));
-        final String instanceName = hostname + "_" + port;
+        final String instanceName = HelixJobEngineAdmin.getCurrentInstanceName();
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+
     }
 
     /**
@@ -199,7 +190,7 @@ public class JobController extends BasicController implements InitializingBean {
         this.jobService = jobService;
     }
 
-    private void updateKylinConfig(List<String> instances) {
+    private void updateKylinCluster(List<String> instances) {
         List<String> instanceRestAddresses = Lists.newArrayList();
         for (String instanceName : instances) {
             int indexOfUnderscore = instanceName.lastIndexOf("_");
@@ -208,6 +199,7 @@ public class JobController extends BasicController implements InitializingBean {
         String restServersInCluster = Strings.join(instanceRestAddresses, ",");
         KylinConfig.getInstanceFromEnv().setProperty("kylin.rest.servers", restServersInCluster);
         System.setProperty("kylin.rest.servers", restServersInCluster);
+        Broadcaster.clearCache();
 
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/3b5260ab/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 0c57d00..7916835 100644
--- a/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -56,6 +56,7 @@ import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.helix.HelixJobEngineAdmin;
 import org.apache.kylin.rest.request.MetricsRequest;
 import org.apache.kylin.rest.response.HBaseResponse;
 import org.apache.kylin.rest.response.MetricsResponse;
@@ -589,9 +590,10 @@ public class CubeService extends BasicService {
     public void updateOnNewSegmentReady(String cubeName) {
         logger.debug("on updateOnNewSegmentReady: " + cubeName);
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        String serverMode = kylinConfig.getServerMode();
-        logger.debug("server mode: " + serverMode);
-        if (Constant.SERVER_MODE_JOB.equals(serverMode.toLowerCase()) || Constant.SERVER_MODE_ALL.equals(serverMode.toLowerCase())) {
+        HelixJobEngineAdmin jobEngineAdmin = HelixJobEngineAdmin.getInstance(kylinConfig.getZookeeperAddress());
+        boolean isLeaderRole = jobEngineAdmin.isLeaderRole(kylinConfig.getClusterName(), HelixJobEngineAdmin.getCurrentInstanceName());
+        logger.debug("server is leader role ? " + isLeaderRole);
+        if (isLeaderRole == true) {
             keepCubeRetention(cubeName);
             mergeCubeSegment(cubeName);
         }