Posted to dev@kylin.apache.org by GitBox <gi...@apache.org> on 2018/02/13 06:04:33 UTC

[GitHub] yiming187 closed pull request #48: Branch kylin 1.6.0 cdh5.9

URL: https://github.com/apache/kylin/pull/48

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/assembly/pom.xml b/assembly/pom.xml
index e6f83a8b18..17716bc11a 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -17,12 +17,11 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>kylin</artifactId>
         <groupId>org.apache.kylin</groupId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
index 4a24ad2e02..54feb245ef 100644
--- a/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
+++ b/assembly/src/test/java/org/apache/kylin/job/DeployUtil.java
@@ -45,8 +45,9 @@
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.source.hive.HiveClient;
+import org.apache.kylin.source.hive.HiveClientFactory;
 import org.apache.kylin.source.hive.HiveCmdBuilder;
+import org.apache.kylin.source.hive.IHiveClient;
 import org.apache.kylin.source.kafka.TimedJsonStreamParser;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
@@ -234,7 +235,7 @@ private static void deployHiveTables() throws Exception {
         String tableFileDir = temp.getParent();
         temp.delete();
 
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         // create hive tables
         hiveClient.executeHQL("CREATE DATABASE IF NOT EXISTS EDW");
         hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase())));
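
For context on the HiveClient change above: call sites no longer construct a concrete client but obtain the IHiveClient interface from a factory. A minimal self-contained sketch of that shape, assuming only the names visible in this diff (IHiveClient, HiveClientFactory, executeHQL); the real factory's implementation selection is not shown here:

    interface IHiveClient {
        void executeHQL(String hql) throws Exception;
    }

    final class HiveClientFactory {
        static IHiveClient getHiveClient() {
            // Kylin selects the concrete client from configuration; a trivial
            // stand-in keeps this sketch compilable on its own.
            return new IHiveClient() {
                @Override
                public void executeHQL(String hql) {
                    System.out.println("HQL> " + hql);
                }
            };
        }
    }

A call site then reads IHiveClient hiveClient = HiveClientFactory.getHiveClient(); exactly as in the DeployUtil change above, staying agnostic of which client variant runs the HQL.
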
diff --git a/atopcalcite/pom.xml b/atopcalcite/pom.xml
index 177b9330cb..51859629bd 100644
--- a/atopcalcite/pom.xml
+++ b/atopcalcite/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>atopcalcite</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
     </parent>
 
     <properties>
diff --git a/build/bin/kylin.sh b/build/bin/kylin.sh
index 9286055e5d..ad3a95299e 100644
--- a/build/bin/kylin.sh
+++ b/build/bin/kylin.sh
@@ -32,7 +32,6 @@ function retrieveDependency() {
    #retrieve $hive_dependency and $hbase_dependency
     source ${dir}/find-hive-dependency.sh
     source ${dir}/find-hbase-dependency.sh
-    #source ${dir}/find-kafka-dependency.sh
 
    #retrieve $KYLIN_EXTRA_START_OPTS
     if [ -f "${dir}/setenv.sh" ]
@@ -41,7 +40,11 @@ function retrieveDependency() {
 
     export HBASE_CLASSPATH_PREFIX=${KYLIN_HOME}/conf:${KYLIN_HOME}/lib/*:${KYLIN_HOME}/tool/*:${KYLIN_HOME}/ext/*:${HBASE_CLASSPATH_PREFIX}
     export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${hive_dependency}
-    #export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${kafka_dependency}
+    if [ -n "$KAFKA_HOME" ]
+    then
+        source ${dir}/find-kafka-dependency.sh
+        export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${kafka_dependency}
+    fi
 }
 
 # start command
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index ed86bdbee3..142a9cdf30 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -48,9 +48,6 @@ kylin.metadata.url=kylin_metadata@hbase
 # The storage for final cube file in hbase
 kylin.storage.url=hbase
 
-# In seconds (2 days)
-kylin.storage.cleanup.time.threshold=172800000
-
 # Working folder in HDFS, make sure user has the right access to the hdfs directory
 kylin.hdfs.working.dir=/kylin
 
@@ -140,6 +137,10 @@ kylin.query.mem.budget=3221225472
 
 kylin.query.coprocessor.mem.gb=3
 
+# the default coprocessor timeout is (hbase.rpc.timeout * 0.9) / 1000 seconds,
+# you can set it to a smaller value. 0 means use default.
+# kylin.query.coprocessor.timeout.seconds=0
+
 # Enable/disable ACL check for cube query
 kylin.query.security.enabled=true
 
@@ -165,11 +166,13 @@ ldap.password=
 ldap.user.searchBase=
 ldap.user.searchPattern=
 ldap.user.groupSearchBase=
+ldap.user.groupSearchFilter=
 
 # LDAP service account directory
 ldap.service.searchBase=
 ldap.service.searchPattern=
 ldap.service.groupSearchBase=
+ldap.service.groupSearchFilter=
 
 ## SAML configurations for SSO
 # SAML IDP metadata file location
diff --git a/build/script/build.sh b/build/script/build.sh
index f54d41c448..c68ca94e36 100755
--- a/build/script/build.sh
+++ b/build/script/build.sh
@@ -31,4 +31,4 @@ npm install -g bower			 || { exit 1; }
 bower --allow-root install		 || { exit 1; }
 npm install						 || { exit 1; }
 npm install -g grunt-cli		 || { exit 1; }
-grunt dev --buildEnv=dev		 || { exit 1; }
+grunt dev --buildEnv=dev --buildNumber=`date "+%Y%m%d%H%M%S"`  || { exit 1; }
diff --git a/core-common/pom.xml b/core-common/pom.xml
index 780fd822b0..a2ca10f36b 100644
--- a/core-common/pom.xml
+++ b/core-common/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-common</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index f49127ba27..c2595fb608 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -22,9 +22,9 @@
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.SortedSet;
-import java.util.Map.Entry;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -477,6 +477,11 @@ public float getHBaseHFileSizeGB() {
         return Float.parseFloat(getOptional("kylin.hbase.hfile.size.gb", "2.0"));
     }
 
+    //check KYLIN-1684, in most cases keep the default value
+    public boolean isSkippingEmptySegments() {
+        return Boolean.valueOf(getOptional("kylin.query.skip-empty-segments", "true"));
+    }
+
     public int getStoragePushDownLimitMax() {
         return Integer.parseInt(getOptional("kylin.query.pushdown.limit.max", "10000"));
     }
@@ -485,10 +490,6 @@ public int getScanThreshold() {
         return Integer.parseInt(getOptional("kylin.query.scan.threshold", "10000000"));
     }
 
-    public float getCubeVisitTimeoutTimes() {
-        return Float.parseFloat(getOptional("kylin.query.cube.visit.timeout.times", "1"));
-    }
-
     public int getBadQueryStackTraceDepth() {
         return Integer.parseInt(getOptional("kylin.query.badquery.stacktrace.depth", "10"));
     }
@@ -508,7 +509,7 @@ public int getBadQueryDefaultDetectIntervalSeconds() {
     public boolean getBadQueryPersistentEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.query.badquery.persistent.enable", "true"));
     }
-    
+
     public String[] getQueryTransformers() {
         return getOptionalStringArray("kylin.query.transformers", new String[0]);
     }
@@ -541,6 +542,10 @@ public double getQueryCoprocessorMemGB() {
         return Double.parseDouble(this.getOptional("kylin.query.coprocessor.mem.gb", "3.0"));
     }
 
+    public int getQueryCoprocessorTimeoutSeconds() {
+        return Integer.parseInt(this.getOptional("kylin.query.coprocessor.timeout.seconds", "0"));
+    }
+
     public boolean isQuerySecureEnabled() {
         return Boolean.parseBoolean(this.getOptional("kylin.query.security.enabled", "true"));
     }
@@ -633,6 +638,10 @@ public String getHiveDatabaseForIntermediateTable() {
         return this.getOptional("kylin.job.hive.database.for.intermediatetable", "default");
     }
 
+    public String getHiveDependencyFilterList() {
+        return this.getOptional("kylin.job.dependency.filterlist", "[^,]*hive-exec[0-9.-]+[^,]*?\\.jar" + "|" + "[^,]*hive-metastore[0-9.-]+[^,]*?\\.jar" + "|" + "[^,]*hive-hcatalog-core[0-9.-]+[^,]*?\\.jar");
+    }
+
     public String getKylinOwner() {
         return this.getOptional("kylin.owner", "");
     }
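
The new kylin.query.coprocessor.timeout.seconds knob defaults to 0, which per the kylin.properties comment earlier in this diff means "derive the timeout from (hbase.rpc.timeout * 0.9) / 1000". A hedged sketch of how a caller could resolve the effective value; the helper class and the hbaseRpcTimeoutMs parameter are illustrative, not part of this change:

    final class CoprocessorTimeout {
        // 0 falls back to 90% of the HBase RPC timeout, matching the documented default.
        static long effectiveTimeoutMs(int configuredSeconds, long hbaseRpcTimeoutMs) {
            return configuredSeconds == 0
                    ? (long) (hbaseRpcTimeoutMs * 0.9)
                    : configuredSeconds * 1000L;
        }
    }

For example, with the HBase default hbase.rpc.timeout of 60000 ms and the setting left at 0, the effective timeout is 54000 ms (54 seconds).
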
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
index 15872aa7ae..b336e4b7b0 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
@@ -124,4 +124,17 @@ else if (s2 == null)
         return result;
     }
 
+    public static void appendWithSeparator(StringBuilder src, String append) {
+        if (src == null) {
+            throw new IllegalArgumentException();
+        }
+        if (src.length() > 0 && src.toString().endsWith(",") == false) {
+            src.append(",");
+        }
+
+        if (StringUtils.isBlank(append) == false) {
+            src.append(append);
+        }
+    }
+
 }
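
Traced on the jar-list use case the helper serves (the jar file names are illustrative): a comma is only inserted when the builder is non-empty and does not already end with one, and blank values are never appended.

    StringBuilder jars = new StringBuilder();
    StringUtil.appendWithSeparator(jars, "hive-exec-1.2.1.jar");
    StringUtil.appendWithSeparator(jars, "hive-metastore-1.2.1.jar");
    // jars.toString() -> "hive-exec-1.2.1.jar,hive-metastore-1.2.1.jar"
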
diff --git a/core-cube/pom.xml b/core-cube/pom.xml
index 39bba59cda..7cfcf9f102 100644
--- a/core-cube/pom.xml
+++ b/core-cube/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-cube</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 720690d6c8..72224572a6 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -439,6 +439,15 @@ public boolean needAutoMerge() {
         return this.getDescriptor().getAutoMergeTimeRanges() != null && this.getDescriptor().getAutoMergeTimeRanges().length > 0;
     }
 
+    public CubeSegment getLastSegment() {
+        List<CubeSegment> existing = getSegments();
+        if (existing.isEmpty()) {
+            return null;
+        } else {
+            return existing.get(existing.size() - 1);
+        }
+    }
+
     @Override
     public int getSourceType() {
         return getFactTableDesc().getSourceType();
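
The new getLastSegment() returns null for a cube with no segments, so callers can derive the next append point null-safely; a sketch mirroring the calculateStartOffsetForAppendSegment helper added to CubeManager later in this diff:

    // Next start offset for an appended segment: 0 for an empty cube,
    // otherwise continue from where the last segment's source offsets end.
    static long nextStartOffset(CubeInstance cube) {
        CubeSegment last = cube.getLastSegment();
        return last == null ? 0 : last.getSourceOffsetEnd();
    }
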
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
index a53849e863..b7ea7b8647 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeManager.java
@@ -34,8 +34,6 @@
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigExt;
@@ -68,9 +66,11 @@
 import org.apache.kylin.metadata.realization.RealizationType;
 import org.apache.kylin.source.ReadableTable;
 import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.SourcePartition;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Multimap;
@@ -434,52 +434,20 @@ private CubeInstance updateCubeWithRetry(CubeUpdate update, int retry) throws IO
 
     // append a full build segment
     public CubeSegment appendSegment(CubeInstance cube) throws IOException {
-        return appendSegment(cube, 0, 0, 0, 0, null, null);
+        return appendSegment(cube, 0, Long.MAX_VALUE, 0, 0, null, null);
     }
 
     public CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate) throws IOException {
         return appendSegment(cube, startDate, endDate, 0, 0, null, null);
     }
 
+    public CubeSegment appendSegment(CubeInstance cube, SourcePartition sourcePartition) throws IOException {
+        return appendSegment(cube, sourcePartition.getStartDate(), sourcePartition.getEndDate(), sourcePartition.getStartOffset(), sourcePartition.getEndOffset(), sourcePartition.getSourcePartitionOffsetStart(), sourcePartition.getSourcePartitionOffsetEnd());
+    }
+
     public CubeSegment appendSegment(CubeInstance cube, long startDate, long endDate, long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) throws IOException {
         checkBuildingSegment(cube);
 
-        if (sourcePartitionOffsetStart == null) {
-            sourcePartitionOffsetStart = Maps.newHashMap();
-        }
-        if (sourcePartitionOffsetEnd == null) {
-            sourcePartitionOffsetEnd = Maps.newHashMap();
-        }
-
-        boolean isOffsetsOn = endOffset != 0;
-        if (isOffsetsOn == true) {
-            checkSourceOffsets(startOffset, endOffset, sourcePartitionOffsetStart, sourcePartitionOffsetEnd);
-        }
-
-        if (cube.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
-            // try figure out a reasonable start if missing
-            if (startDate == 0 && startOffset == 0) {
-                final CubeSegment last = getLatestSegment(cube);
-                if (last != null) {
-                    if (isOffsetsOn) {
-                        if (last.getSourceOffsetEnd() == Long.MAX_VALUE) {
-                            throw new IllegalStateException("There is already one pending for building segment, please submit request later.");
-                        }
-                        startOffset = last.getSourceOffsetEnd();
-                        sourcePartitionOffsetStart = last.getSourcePartitionOffsetEnd();
-                    } else {
-                        startDate = last.getDateRangeEnd();
-                    }
-                }
-            }
-
-        } else {
-            startDate = 0;
-            endDate = Long.MAX_VALUE;
-            startOffset = 0;
-            endOffset = 0;
-        }
-
         CubeSegment newSegment = newSegment(cube, startDate, endDate, startOffset, endOffset);
         newSegment.setSourcePartitionOffsetStart(sourcePartitionOffsetStart);
         newSegment.setSourcePartitionOffsetEnd(sourcePartitionOffsetEnd);
@@ -638,7 +606,7 @@ public static long maxDateRangeEnd(List<CubeSegment> mergingSegments) {
         return max;
     }
 
-    private CubeSegment getLatestSegment(CubeInstance cube) {
+    public CubeSegment getLatestSegment(CubeInstance cube) {
         List<CubeSegment> existing = cube.getSegments();
         if (existing.isEmpty()) {
             return null;
@@ -647,49 +615,28 @@ private CubeSegment getLatestSegment(CubeInstance cube) {
         }
     }
 
-    private void checkBuildingSegment(CubeInstance cube) {
-        int maxBuldingSeg = cube.getConfig().getMaxBuildingSegments();
-        if (cube.getBuildingSegments().size() >= maxBuldingSeg) {
-            throw new IllegalStateException("There is already " + cube.getBuildingSegments().size() + " building segment; ");
+    private long calculateStartOffsetForAppendSegment(CubeInstance cube) {
+        List<CubeSegment> existing = cube.getSegments();
+        if (existing.isEmpty()) {
+            return 0;
+        } else {
+            return existing.get(existing.size() - 1).getSourceOffsetEnd();
         }
     }
 
-    private void checkSourceOffsets(long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) {
-        if (endOffset <= 0)
-            return;
-
-        if (startOffset >= endOffset) {
-            throw new IllegalArgumentException("'startOffset' need be smaller than 'endOffset'");
-        }
-
-        if (startOffset > 0) {
-            if (sourcePartitionOffsetStart == null || sourcePartitionOffsetStart.size() == 0) {
-                throw new IllegalArgumentException("When 'startOffset' is > 0, need provide each partition's start offset");
-            }
-
-            long totalOffset = 0;
-            for (Long v : sourcePartitionOffsetStart.values()) {
-                totalOffset += v;
-            }
-
-            if (totalOffset != startOffset) {
-                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetStart', doesn't match with 'startOffset'");
-            }
+    private long calculateStartDateForAppendSegment(CubeInstance cube) {
+        List<CubeSegment> existing = cube.getSegments();
+        if (existing.isEmpty()) {
+            return cube.getDescriptor().getPartitionDateStart();
+        } else {
+            return existing.get(existing.size() - 1).getDateRangeEnd();
         }
+    }
 
-        if (endOffset > 0 && endOffset != Long.MAX_VALUE) {
-            if (sourcePartitionOffsetEnd == null || sourcePartitionOffsetEnd.size() == 0) {
-                throw new IllegalArgumentException("When 'endOffset' is not Long.MAX_VALUE, need provide each partition's start offset");
-            }
-
-            long totalOffset = 0;
-            for (Long v : sourcePartitionOffsetEnd.values()) {
-                totalOffset += v;
-            }
-
-            if (totalOffset != endOffset) {
-                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetEnd', doesn't match with 'endOffset'");
-            }
+    private void checkBuildingSegment(CubeInstance cube) {
+        int maxBuldingSeg = cube.getConfig().getMaxBuildingSegments();
+        if (cube.getBuildingSegments().size() >= maxBuldingSeg) {
+            throw new IllegalStateException("There is already " + cube.getBuildingSegments().size() + " building segment; ");
         }
     }
 
@@ -977,8 +924,10 @@ private synchronized CubeInstance reloadCubeLocalAt(String path) {
 
             if (!cubeDesc.getError().isEmpty()) {
                 cube.setStatus(RealizationStatusEnum.DESCBROKEN);
-                logger.warn("cube descriptor {} (for cube '{}') is broken", cubeDesc.getResourcePath(), cubeName);
-
+                logger.error("cube descriptor {} (for cube '{}') is broken", cubeDesc.getResourcePath(), cubeName);
+                for (String error : cubeDesc.getError()) {
+                    logger.error("Error: {}", error);
+                }
             } else if (cube.getStatus() == RealizationStatusEnum.DESCBROKEN) {
                 cube.setStatus(RealizationStatusEnum.DISABLED);
                 logger.info("cube {} changed from DESCBROKEN to DISABLED", cubeName);
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
index fdf1fb03b0..b3d6d6b915 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeSegment.java
@@ -49,6 +49,8 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import javax.annotation.concurrent.GuardedBy;
+
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
 public class CubeSegment implements Comparable<CubeSegment>, IBuildable, ISegment {
 
@@ -115,7 +117,8 @@
     @JsonInclude(JsonInclude.Include.NON_EMPTY)
     private Map<String, String> additionalInfo = new LinkedHashMap<String, String>();
 
-    private volatile Map<Long, Short> cuboidBaseShards = Maps.newHashMap();//cuboid id ==> base(starting) shard for this cuboid
+    @GuardedBy("this")
+    private Map<Long, Short> cuboidBaseShards = Maps.newHashMap(); // cuboid id ==> base(starting) shard for this cuboid
 
     public CubeDesc getCubeDesc() {
         return getCubeInstance().getDescriptor();
@@ -516,7 +519,7 @@ public void setTotalShards(int totalShards) {
         this.totalShards = totalShards;
     }
 
-    public short getCuboidBaseShard(Long cuboidId) {
+    public synchronized short getCuboidBaseShard(Long cuboidId) {
         if (totalShards > 0) {
             //shard squashed case
 
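
On the cuboidBaseShards change above: volatile only guaranteed visibility of the map reference; concurrent reads and writes on the HashMap itself could still race. @GuardedBy plus a synchronized accessor serializes access instead. A self-contained sketch of the pattern, with illustrative class and field names:

    import java.util.HashMap;
    import java.util.Map;

    import javax.annotation.concurrent.GuardedBy;

    class CuboidShards {
        @GuardedBy("this")
        private final Map<Long, Short> baseShards = new HashMap<>();

        synchronized short baseShardOf(long cuboidId) {
            Short s = baseShards.get(cuboidId);
            if (s == null) {
                s = (short) (cuboidId % 7); // placeholder computation
                baseShards.put(cuboidId, s); // cached under the same lock
            }
            return s;
        }
    }
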
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
index 6e76ac33aa..3c88c62e12 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/AggregationGroup.java
@@ -140,7 +140,7 @@ private void buildMandatoryColumnMask(RowKeyDesc rowKeyDesc) {
         for (String dim : mandatory_dims) {
             TblColRef hColumn = cubeDesc.getModel().findColumn(dim);
             Integer index = rowKeyDesc.getColumnBitIndex(hColumn);
-            mandatoryColumnMask |= 1 << index;
+            mandatoryColumnMask |= (1L << index);
         }
 
     }
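
The one-character fix above matters because Java masks an int shift count to 5 bits: for a row key with more than 31 columns, 1 << index silently sets the wrong bit, whereas 1L << index uses the full 64-bit range. A worked example:

    public class ShiftDemo {
        public static void main(String[] args) {
            long wrong = 1 << 40;  // int shift: count is 40 & 31 == 8, so this is 256
            long right = 1L << 40; // long shift: 2^40 == 1099511627776
            System.out.println(wrong + " vs " + right);
        }
    }

The same overflow is why the combination counters in CubeDesc and AggregationGroupRule below switch from int to long.
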
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index 5c73f21aca..77e61796e1 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -39,6 +39,7 @@
 
 import javax.annotation.Nullable;
 
+import com.google.common.collect.Iterables;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.ArrayUtils;
@@ -50,6 +51,7 @@
 import org.apache.kylin.common.persistence.RootPersistentEntity;
 import org.apache.kylin.common.util.Array;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.measure.MeasureType;
 import org.apache.kylin.measure.extendedcolumn.ExtendedColumnMeasureType;
 import org.apache.kylin.metadata.MetadataConstants;
@@ -565,7 +567,7 @@ public void validateAggregationGroups() {
                 throw new IllegalStateException("Aggregation group " + index + " select rule field not set");
             }
 
-            int combination = 1;
+            long combination = 1;
             Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
             getDims(includeDims, agg.getIncludes());
 
@@ -583,12 +585,19 @@ public void validateAggregationGroups() {
             Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
             getDims(jointDimsList, jointDims, agg.getSelectRule().joint_dims);
             if (jointDimsList.size() > 0) {
-                combination = combination * (1 << jointDimsList.size());
+                combination = combination * (1L << jointDimsList.size());
             }
 
             if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims) || !includeDims.containsAll(jointDims)) {
-                logger.error("Aggregation group " + index + " Include dims not containing all the used dims");
-                throw new IllegalStateException("Aggregation group " + index + " Include dims not containing all the used dims");
+                List<String> notIncluded = Lists.newArrayList();
+                final Iterable<String> all = Iterables.unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
+                for (String dim : all) {
+                    if (includeDims.contains(dim) == false) {
+                        notIncluded.add(dim);
+                    }
+                }
+                logger.error("Aggregation group " + index + " Include dimensions not containing all the used dimensions");
+                throw new IllegalStateException("Aggregation group " + index + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
             }
 
             Set<String> normalDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
@@ -597,7 +606,7 @@ public void validateAggregationGroups() {
             normalDims.removeAll(hierarchyDims);
             normalDims.removeAll(jointDims);
 
-            combination = combination * (1 << normalDims.size());
+            combination = combination * (1L << normalDims.size());
 
             if (combination > config.getCubeAggrGroupMaxCombination()) {
                 String msg = "Aggregation group " + index + " has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max.combination' to a bigger value.";
@@ -607,33 +616,37 @@ public void validateAggregationGroups() {
             }
 
             if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
-                logger.warn("Aggregation group " + index + " mandatory dims overlap with hierarchy dims");
+
+                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: " + Sets.intersection(mandatoryDims, hierarchyDims));
             }
             if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
-                logger.warn("Aggregation group " + index + " mandatory dims overlap with joint dims");
+                logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: " + Sets.intersection(mandatoryDims, jointDims));
             }
 
             if (CollectionUtils.containsAny(hierarchyDims, jointDims)) {
-                logger.error("Aggregation group " + index + " hierarchy dims overlap with joint dims");
-                throw new IllegalStateException("Aggregation group " + index + " hierarchy dims overlap with joint dims");
+                logger.error("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions");
+                throw new IllegalStateException("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: " + Sets.intersection(hierarchyDims, jointDims));
             }
 
             if (hasSingle(hierarchyDimsList)) {
-                logger.error("Aggregation group " + index + " require at least 2 dims in a hierarchy");
-                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dims in a hierarchy");
+                logger.error("Aggregation group " + index + " require at least 2 dimensions in a hierarchy");
+                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dimensions in a hierarchy.");
             }
             if (hasSingle(jointDimsList)) {
-                logger.error("Aggregation group " + index + " require at least 2 dims in a joint");
-                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dims in a joint");
+                logger.error("Aggregation group " + index + " require at least 2 dimensions in a joint");
+                throw new IllegalStateException("Aggregation group " + index + " require at least 2 dimensions in a joint");
             }
 
-            if (hasOverlap(hierarchyDimsList, hierarchyDims)) {
-                logger.error("Aggregation group " + index + " a dim exist in more than one hierarchy");
-                throw new IllegalStateException("Aggregation group " + index + " a dim exist in more than one hierarchy");
+            Pair<Boolean, Set<String>> overlap = hasOverlap(hierarchyDimsList, hierarchyDims);
+            if (overlap.getFirst() == true) {
+                logger.error("Aggregation group " + index + " a dimension exist in more than one hierarchy: " + overlap.getSecond());
+                throw new IllegalStateException("Aggregation group " + index + " a dimension exist in more than one hierarchy: " + overlap.getSecond());
             }
-            if (hasOverlap(jointDimsList, jointDims)) {
-                logger.error("Aggregation group " + index + " a dim exist in more than one joint");
-                throw new IllegalStateException("Aggregation group " + index + " a dim exist in more than one joint");
+
+            overlap = hasOverlap(jointDimsList, jointDims);
+            if (overlap.getFirst() == true) {
+                logger.error("Aggregation group " + index + " a dimension exist in more than one joint: " + overlap.getSecond());
+                throw new IllegalStateException("Aggregation group " + index + " a dimension exist in more than one joint: " + overlap.getSecond());
             }
 
             index++;
@@ -664,21 +677,24 @@ private void getDims(ArrayList<Set<String>> dimsList, Set<String> dims, String[]
     private boolean hasSingle(ArrayList<Set<String>> dimsList) {
         boolean hasSingle = false;
         for (Set<String> dims : dimsList) {
-            if (dims.size() < 2)
+            if (dims.size() < 2) {
                 hasSingle = true;
+                break;
+            }
         }
         return hasSingle;
     }
 
-    private boolean hasOverlap(ArrayList<Set<String>> dimsList, Set<String> Dims) {
-        boolean hasOverlap = false;
-        int dimSize = 0;
+    private Pair<Boolean, Set<String>> hasOverlap(ArrayList<Set<String>> dimsList, Set<String> Dims) {
+        Set<String> existing = new TreeSet<>();
+        Set<String> overlap = new TreeSet<>();
         for (Set<String> dims : dimsList) {
-            dimSize += dims.size();
+            if (CollectionUtils.containsAny(existing, dims)) {
+                overlap.addAll(Sets.intersection(existing, dims));
+            }
+            existing.addAll(dims);
         }
-        if (dimSize != Dims.size())
-            hasOverlap = true;
-        return hasOverlap;
+        return new Pair<>(overlap.size() > 0, overlap);
     }
 
     private void initDimensionColumns() {
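
The rewritten hasOverlap trades the old size-based heuristic for an explicit intersection, which is what lets the error messages name the offending dimensions. The same logic traced on toy data, assuming java.util.* and com.google.common.collect.Sets imports; the dimension names are borrowed from the tests further down:

    List<Set<String>> dimsList = Arrays.asList(
            new TreeSet<>(Arrays.asList("META_CATEG_NAME", "CATEG_LVL2_NAME")),
            new TreeSet<>(Arrays.asList("CATEG_LVL2_NAME", "CATEG_LVL3_NAME")));

    Set<String> existing = new TreeSet<>();
    Set<String> overlap = new TreeSet<>();
    for (Set<String> dims : dimsList) {
        overlap.addAll(Sets.intersection(existing, dims)); // names already seen
        existing.addAll(dims);
    }
    // overlap -> [CATEG_LVL2_NAME]; the old code could only answer true/false.
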
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
index 8af2297724..979af76d60 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeJoinedFlatTableEnrich.java
@@ -20,7 +20,6 @@
 
 import java.util.List;
 
-import org.apache.kylin.common.util.BytesSplitter;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.metadata.model.FunctionDesc;
@@ -81,16 +80,6 @@ private void parseCubeDesc() {
         }
     }
 
-    // sanity check the input record (in bytes) matches what's expected
-    public void sanityCheck(BytesSplitter bytesSplitter) {
-        int columnCount = flatDesc.getAllColumns().size();
-        if (columnCount != bytesSplitter.getBufferSize()) {
-            throw new IllegalArgumentException("Expect " + columnCount + " columns, but see " + bytesSplitter.getBufferSize() + " -- " + bytesSplitter);
-        }
-
-        // TODO: check data types here
-    }
-
     public CubeDesc getCubeDesc() {
         return cubeDesc;
     }
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
index 7b9078241b..55cb844aec 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
@@ -18,10 +18,12 @@
 
 package org.apache.kylin.cube.model.validation.rule;
 
-import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import com.google.common.collect.Sets;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.kylin.cube.model.AggregationGroup;
 import org.apache.kylin.cube.model.CubeDesc;
@@ -29,6 +31,9 @@
 import org.apache.kylin.cube.model.validation.ResultLevel;
 import org.apache.kylin.cube.model.validation.ValidateContext;
 
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
 /**
  *  find forbid overlaps in each AggregationGroup
  *  the include dims in AggregationGroup must contain all mandatory, hierarchy and joint
@@ -40,21 +45,24 @@ public void validate(CubeDesc cube, ValidateContext context) {
         inner(cube, context);
     }
 
+    public AggregationGroupRule() {
+    }
+
     private void inner(CubeDesc cube, ValidateContext context) {
 
         int index = 0;
         for (AggregationGroup agg : cube.getAggregationGroups()) {
             if (agg.getIncludes() == null) {
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " includes field not set");
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' field not set");
                 continue;
             }
 
             if (agg.getSelectRule() == null) {
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " select rule field not set");
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'select rule' field not set");
                 continue;
             }
 
-            int combination = 1;
+            long combination = 1;
             Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
             if (agg.getIncludes() != null) {
                 for (String include : agg.getIncludes()) {
@@ -90,7 +98,14 @@ private void inner(CubeDesc cube, ValidateContext context) {
             }
 
             if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims) || !includeDims.containsAll(jointDims)) {
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " Include dims not containing all the used dims");
+                List<String> notIncluded = Lists.newArrayList();
+                final Iterable<String> all = Iterables.unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
+                for (String dim : all) {
+                    if (includeDims.contains(dim) == false) {
+                        notIncluded.add(dim);
+                    }
+                }
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
                 continue;
             }
 
@@ -100,14 +115,18 @@ private void inner(CubeDesc cube, ValidateContext context) {
             normalDims.removeAll(hierarchyDims);
             normalDims.removeAll(jointDims);
 
-            combination = combination * (1 << normalDims.size());
+            combination = combination * (1L << normalDims.size());
 
             if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dims overlap with hierarchy dims");
+                Set<String> intersection = new TreeSet<>(mandatoryDims);
+                intersection.retainAll(hierarchyDims);
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
                 continue;
             }
             if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
-                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dims overlap with joint dims");
+                Set<String> intersection = new HashSet<>(mandatoryDims);
+                intersection.retainAll(jointDims);
+                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
                 continue;
             }
 
@@ -121,7 +140,7 @@ private void inner(CubeDesc cube, ValidateContext context) {
                     }
 
                     if (oneJoint.size() < 2) {
-                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " require at least 2 dims in a joint");
+                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " require at least 2 dimensions in a joint: " + oneJoint.toString());
                         continue;
                     }
                     jointDimNum += oneJoint.size();
@@ -129,33 +148,49 @@ private void inner(CubeDesc cube, ValidateContext context) {
                     int overlapHierarchies = 0;
                     if (agg.getSelectRule().hierarchy_dims != null) {
                         for (String[] oneHierarchy : agg.getSelectRule().hierarchy_dims) {
-                            Set<String> share = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
-                            share.addAll(CollectionUtils.intersection(oneJoint, Arrays.asList(oneHierarchy)));
+                            Set<String> oneHierarchySet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+                            for (String s : oneHierarchy) {
+                                oneHierarchySet.add(s);
+                            }
+                            Set<String> share = Sets.intersection(oneJoint, oneHierarchySet);
 
                             if (!share.isEmpty()) {
                                 overlapHierarchies++;
                             }
                             if (share.size() > 1) {
-                                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint columns overlap with more than 1 dim in same hierarchy");
+                                context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint dimensions has overlap with more than 1 dimensions in same hierarchy: " + share.toString());
                                 continue;
                             }
                         }
 
                         if (overlapHierarchies > 1) {
-                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint columns overlap with more than 1 hierarchies");
+                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " joint dimensions has overlap with more than 1 hierarchies");
                             continue;
                         }
                     }
                 }
 
                 if (jointDimNum != jointDims.size()) {
-                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " a dim exist in more than one joint");
+
+                    Set<String> existing = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+                    Set<String> overlap = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+                    for (String[] joints : agg.getSelectRule().joint_dims) {
+                        Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
+                        for (String s : joints) {
+                            oneJoint.add(s);
+                        }
+                        if (CollectionUtils.containsAny(existing, oneJoint)) {
+                            overlap.addAll(Sets.intersection(existing, oneJoint));
+                        }
+                        existing.addAll(oneJoint);
+                    }
+                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " a dimension exists in more than one joint: " + overlap.toString());
                     continue;
                 }
             }
 
             if (combination > getMaxCombinations(cube)) {
-                String msg = "Aggregation group " + index + " has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max.combination' to a bigger value.";
+                String msg = "Aggregation group " + index + " has too many combinations, current combination is " + combination + ", max allowed combination is " + getMaxCombinations(cube) + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max.combination' to a bigger value.";
                 context.addResult(ResultLevel.ERROR, msg);
                 continue;
             }
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
index b01ac3f947..d87b41bfa6 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/StorageSideBehavior.java
@@ -26,7 +26,7 @@
    SCAN_FILTER, //only scan+filter used, used for profiling filter speed. Will not return any result
     SCAN_FILTER_AGGR, //aggregate the result.  Will return results
     SCAN_FILTER_AGGR_CHECKMEM, //default full operations. Will return results
-    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY; // on each scan operation, delay for 10s to simulate slow queries, for test use
+    SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY; // on each scan operation, delay for 10ms to simulate slow queries, for test use
 
     public boolean filterToggledOn() {
         return this.ordinal() >= SCAN_FILTER.ordinal();
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java b/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
index a06ce1b77b..4f42693092 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/AggregationGroupRuleTest.java
@@ -23,6 +23,7 @@
 
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 
@@ -65,7 +66,7 @@ protected int getMaxCombinations(CubeDesc cubeDesc) {
             rule.validate(desc, vContext);
             vContext.print(System.out);
             assertTrue(vContext.getResults().length > 0);
-            assertEquals("Aggregation group 0 has too many combinations, use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max.combination' to a bigger value.", (vContext.getResults()[0].getMessage()));
+            assertTrue(vContext.getResults()[0].getMessage().startsWith("Aggregation group 0 has too many combinations"));
         }
     }
 
@@ -97,7 +98,7 @@ public void testBadDesc1() throws IOException {
         //        System.out.println(vContext.getResults().length);
         //        System.out.println(vContext.getResults()[0].getMessage());
         assertEquals(1, vContext.getResults().length);
-        assertEquals("Aggregation group 0 Include dims not containing all the used dims", (vContext.getResults()[0].getMessage()));
+        assertEquals("Aggregation group 0 'includes' dimensions not include all the dimensions:[seller_id, META_CATEG_NAME, lstg_format_name, lstg_site_id, slr_segment_cd]", (vContext.getResults()[0].getMessage()));
     }
 
     @Test
@@ -111,8 +112,19 @@ public void testBadDesc2() throws IOException {
         IValidatorRule<CubeDesc> rule = getAggregationGroupRule();
         rule.validate(desc, vContext);
         vContext.print(System.out);
+        assertEquals("Aggregation group 0 joint dimensions has overlap with more than 1 dimensions in same hierarchy: [CATEG_LVL2_NAME, META_CATEG_NAME]", (vContext.getResults()[0].getMessage()));
+    }
+
+    @Test
+    public void testCombinationIntOverflow() throws IOException {
+        ValidateContext vContext = new ValidateContext();
+        CubeDesc desc = JsonUtil.readValue(new FileInputStream(LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/cube_desc/ut_cube_desc_combination_int_overflow.json"), CubeDesc.class);
+
+        desc.getAggregationGroups().get(0).getSelectRule().joint_dims = new String[][] { };
+
+        IValidatorRule<CubeDesc> rule = getAggregationGroupRule();
+        rule.validate(desc, vContext);
         assertEquals(1, vContext.getResults().length);
-        assertEquals("Aggregation group 0 joint columns overlap with more than 1 dim in same hierarchy", (vContext.getResults()[0].getMessage()));
     }
 
     public AggregationGroupRule getAggregationGroupRule() {
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index db80025ef4..01e5090889 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -86,7 +86,7 @@ public void testBadInit2() throws Exception {
     @Test
     public void testBadInit3() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 Include dims not containing all the used dims");
+        thrown.expectMessage("Aggregation group 0 'includes' dimensions not include all the dimensions:[SELLER_ID, META_CATEG_NAME, LSTG_FORMAT_NAME, LSTG_SITE_ID, SLR_SEGMENT_CD]");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         String[] temp = Arrays.asList(cubeDesc.getAggregationGroups().get(0).getIncludes()).subList(0, 3).toArray(new String[3]);
@@ -128,7 +128,7 @@ public void testBadInit6() throws Exception {
     @Test
     public void testBadInit7() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 require at least 2 dims in a joint");
+        thrown.expectMessage("Aggregation group 0 require at least 2 dimensions in a joint");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().joint_dims = new String[][] { new String[] { "lstg_format_name" } };
@@ -139,7 +139,7 @@ public void testBadInit7() throws Exception {
     @Test
     public void testBadInit8() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 hierarchy dims overlap with joint dims");
+        thrown.expectMessage("Aggregation group 0 hierarchy dimensions overlap with joint dimensions: [CATEG_LVL2_NAME, META_CATEG_NAME]");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().joint_dims = new String[][] { new String[] { "META_CATEG_NAME", "CATEG_LVL2_NAME" } };
@@ -150,7 +150,7 @@ public void testBadInit8() throws Exception {
     @Test
     public void testBadInit9() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 hierarchy dims overlap with joint dims");
+        thrown.expectMessage("Aggregation group 0 hierarchy dimensions overlap with joint dimensions: [lstg_format_name, META_CATEG_NAME]");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchy_dims = new String[][] { new String[] { "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" }, new String[] { "lstg_format_name", "lstg_site_id" } };
@@ -162,7 +162,7 @@ public void testBadInit9() throws Exception {
     @Test
     public void testBadInit10() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 a dim exist in more than one joint");
+        thrown.expectMessage("Aggregation group 0 a dimension exist in more than one joint: [lstg_format_name, lstg_site_id]");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().joint_dims = new String[][] { new String[] { "lstg_format_name", "lstg_site_id", "slr_segment_cd" }, new String[] { "lstg_format_name", "lstg_site_id", "leaf_categ_id" } };
@@ -173,7 +173,7 @@ public void testBadInit10() throws Exception {
     @Test
     public void testBadInit11() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 require at least 2 dims in a hierarchy");
+        thrown.expectMessage("Aggregation group 0 require at least 2 dimensions in a hierarchy.");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchy_dims = new String[][] { new String[] { "META_CATEG_NAME" } };
@@ -184,7 +184,7 @@ public void testBadInit11() throws Exception {
     @Test
     public void testBadInit12() throws Exception {
         thrown.expect(IllegalStateException.class);
-        thrown.expectMessage("Aggregation group 0 a dim exist in more than one hierarchy");
+        thrown.expectMessage("Aggregation group 0 a dimension exist in more than one hierarchy: [CATEG_LVL2_NAME, META_CATEG_NAME]");
 
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
         cubeDesc.getAggregationGroups().get(0).getSelectRule().hierarchy_dims = new String[][] { new String[] { "META_CATEG_NAME", "CATEG_LVL2_NAME", "CATEG_LVL3_NAME" }, new String[] { "META_CATEG_NAME", "CATEG_LVL2_NAME" } };
@@ -192,6 +192,14 @@ public void testBadInit12() throws Exception {
         cubeDesc.init(getTestConfig());
     }
 
+    @Test
+    public void testCombinationIntOverflow() throws Exception {
+        thrown.expect(IllegalStateException.class);
+        CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("ut_cube_desc_combination_int_overflow");
+        cubeDesc.getAggregationGroups().get(0).getSelectRule().joint_dims = new String[][] { };
+        cubeDesc.init(getTestConfig());
+    }
+
     @Test
     public void testSerialize() throws Exception {
         CubeDesc desc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc("test_kylin_cube_with_slr_desc");
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
index bb90d29984..2904eb2e65 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeManagerTest.java
@@ -111,7 +111,7 @@ public void testAutoMergeNormal() throws Exception {
         CubeSegment seg1 = mgr.appendSegment(cube, 0, 1000, 0, 0, null, null);
         seg1.setStatus(SegmentStatusEnum.READY);
 
-        CubeSegment seg2 = mgr.appendSegment(cube, 0, 2000, 0, 0, null, null);
+        CubeSegment seg2 = mgr.appendSegment(cube, 1000, 2000, 0, 0, null, null);
         seg2.setStatus(SegmentStatusEnum.READY);
 
         CubeUpdate cubeBuilder = new CubeUpdate(cube);
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeSegmentsTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeSegmentsTest.java
index 828a3a98c4..a5bd821e12 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeSegmentsTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeSegmentsTest.java
@@ -110,7 +110,7 @@ public void testPartitioned() throws IOException {
         seg1.setStatus(SegmentStatusEnum.READY);
 
         // append second
-        CubeSegment seg2 = mgr.appendSegment(cube, 0, 2000);
+        CubeSegment seg2 = mgr.appendSegment(cube, 1000, 2000);
 
         assertEquals(2, cube.getSegments().size());
         assertEquals(1000, seg2.getDateRangeStart());
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
index ecb12002e6..bdceb0fa4d 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/cuboid/CuboidSchedulerTest.java
@@ -24,6 +24,7 @@
 
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.List;
 
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.cube.CubeDescManager;
@@ -283,6 +284,18 @@ public void testCuboidCounts5() {
         assertEquals(cuboidScheduler.getCuboidCount(), sum);
     }
 
+    @Test
+    public void testCuboid_onlyBaseCuboid() {
+        CubeDesc cube = getCubeDescManager().getCubeDesc("ut_large_dimension_number");
+        CuboidScheduler scheduler = new CuboidScheduler(cube);
+        
+        Cuboid baseCuboid = Cuboid.getBaseCuboid(cube);
+        assertTrue(Cuboid.isValid(cube, baseCuboid.getId()));
+        
+        List<Long> spanningChild = scheduler.getSpanningCuboid(baseCuboid.getId());
+        assertTrue(spanningChild.size() > 0);
+    }
+
     public CubeDescManager getCubeDescManager() {
         return CubeDescManager.getInstance(getTestConfig());
     }
diff --git a/core-dictionary/pom.xml b/core-dictionary/pom.xml
index 1bf97afee9..d7122726bc 100644
--- a/core-dictionary/pom.xml
+++ b/core-dictionary/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-dictionary</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-job/pom.xml b/core-job/pom.xml
index 601f730694..04642a8a54 100644
--- a/core-job/pom.xml
+++ b/core-job/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-job</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
index 3cc27ba3b5..c5f38e7853 100644
--- a/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/JoinedFlatTable.java
@@ -92,8 +92,6 @@ public static String generateCreateTableStatement(IJoinedFlatTableDesc flatDesc,
             ddl.append(colName(col.getCanonicalName()) + " " + getHiveDataType(col.getDatatype()) + "\n");
         }
         ddl.append(")" + "\n");
-
-        ddl.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\177'" + "\n");
         ddl.append("STORED AS SEQUENCEFILE" + "\n");
         ddl.append("LOCATION '" + getTableDir(flatDesc, storageDfsDir) + "';").append("\n");
         // ddl.append("TBLPROPERTIES ('serialization.null.format'='\\\\N')" +
@@ -128,15 +126,17 @@ public static String generateSelectDataStatement(IJoinedFlatTableDesc flatDesc,
             sql.append(tableAlias + "." + col.getName() + "\n");
         }
         appendJoinStatement(flatDesc, sql, tableAliasMap);
-        appendWhereStatement(flatDesc, sql, tableAliasMap);
-        if (redistribute == true) {
-            String redistributeCol = null;
-            TblColRef distDcol = flatDesc.getDistributedBy();
-            if (distDcol != null) {
-                String tblAlias = tableAliasMap.get(distDcol.getTable());
-                redistributeCol = tblAlias + "." + distDcol.getName();
+        if (flatDesc.getSegment() != null) {
+            appendWhereStatement(flatDesc, sql, tableAliasMap);
+            if (redistribute == true) {
+                String redistributeCol = null;
+                TblColRef distDcol = flatDesc.getDistributedBy();
+                if (distDcol != null) {
+                    String tblAlias = tableAliasMap.get(distDcol.getTable());
+                    redistributeCol = tblAlias + "." + distDcol.getName();
+                }
+                appendDistributeStatement(sql, redistributeCol);
             }
-            appendDistributeStatement(sql, redistributeCol);
         }
         return sql.toString();
     }
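
For context, generateSelectDataStatement now appends the WHERE and DISTRIBUTE BY
clauses only when the flat table is tied to a segment. A standalone sketch of that
control flow, with all table and column names made up for illustration:

    public class FlatTableSqlSketch {

        // hasSegment stands in for flatDesc.getSegment() != null; with no segment
        // range to constrain, neither WHERE nor DISTRIBUTE BY is emitted.
        static String selectSql(boolean hasSegment, boolean redistribute, String distCol) {
            StringBuilder sql = new StringBuilder("SELECT T.C1, T.C2\nFROM T");
            if (hasSegment) {
                sql.append("\nWHERE T.PART_DT >= '2016-01-01'"); // hypothetical range filter
                if (redistribute && distCol != null) {
                    sql.append("\nDISTRIBUTE BY ").append(distCol);
                }
            }
            return sql.toString();
        }

        public static void main(String[] args) {
            System.out.println(selectSql(true, true, "T.C1"));
            System.out.println("----");
            System.out.println(selectSql(false, true, "T.C1"));
        }
    }
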
diff --git a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
index ad0b1b1c88..cec2e5df6f 100644
--- a/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
+++ b/core-job/src/main/java/org/apache/kylin/job/constant/ExecutableConstants.java
@@ -52,6 +52,7 @@ private ExecutableConstants() {
     public static final String STEP_NAME_MERGE_CUBOID = "Merge Cuboid Data";
     public static final String STEP_NAME_UPDATE_CUBE_INFO = "Update Cube Info";
     public static final String STEP_NAME_HIVE_CLEANUP = "Hive Cleanup";
+    public static final String STEP_NAME_KAFKA_CLEANUP = "Kafka Intermediate File Cleanup";
     public static final String STEP_NAME_GARBAGE_COLLECTION = "Garbage Collection";
     public static final String STEP_NAME_GARBAGE_COLLECTION_HDFS = "Garbage Collection on HDFS";
     public static final String STEP_NAME_BUILD_II = "Build Inverted Index";
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index b4ca469b43..90e4d3cb84 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -74,8 +74,6 @@ protected void onExecuteFinished(ExecuteResult result, ExecutableContext executa
         if (!isDiscarded()) {
             if (result.succeed()) {
                 executableManager.updateJobOutput(getId(), ExecutableState.SUCCEED, null, result.output());
-            } else if (result.discarded()) {
-                executableManager.updateJobOutput(getId(), ExecutableState.DISCARDED, null, result.output());
             } else {
                 executableManager.updateJobOutput(getId(), ExecutableState.ERROR, null, result.output());
             }
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
index 5a57b051fd..39a5f4f57d 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/DefaultChainedExecutable.java
@@ -119,8 +119,6 @@ protected void onExecuteFinished(ExecuteResult result, ExecutableContext executa
             } else {
                 jobService.updateJobOutput(getId(), ExecutableState.READY, null, null);
             }
-        } else if (result.discarded()) {
-            jobService.updateJobOutput(getId(), ExecutableState.DISCARDED, null, result.output());
         } else {
             setEndTime(System.currentTimeMillis());
             jobService.updateJobOutput(getId(), ExecutableState.ERROR, null, result.output());
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java b/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
index 2347e7db6b..760a574878 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/ExecuteResult.java
@@ -49,10 +49,6 @@ public boolean succeed() {
         return state == State.SUCCEED;
     }
 
-    public boolean discarded() {
-        return state == State.DISCARDED;
-    }
-
     public String output() {
         return output;
     }
diff --git a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
index 2baf10a50e..df521f95e0 100644
--- a/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
+++ b/core-job/src/test/java/org/apache/kylin/job/impl/threadpool/DefaultSchedulerTest.java
@@ -29,7 +29,6 @@
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.kylin.job.DiscardedTestExecutable;
 import org.apache.kylin.job.BaseTestExecutable;
 import org.apache.kylin.job.ErrorTestExecutable;
 import org.apache.kylin.job.FailedTestExecutable;
@@ -83,21 +82,6 @@ public void testSucceedAndFailed() throws Exception {
         Assert.assertEquals(ExecutableState.ERROR, jobService.getOutput(task2.getId()).getState());
     }
 
-    @Test
-    public void testSucceedAndDiscarded() throws Exception {
-        DefaultChainedExecutable job = new DefaultChainedExecutable();
-        BaseTestExecutable task1 = new SucceedTestExecutable();
-        BaseTestExecutable task2 = new DiscardedTestExecutable();
-        job.addTask(task1);
-        job.addTask(task2);
-        jobService.addJob(job);
-        waitForJobFinish(job.getId());
-        Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(job.getId()).getState());
-        Assert.assertEquals(ExecutableState.SUCCEED, jobService.getOutput(task1.getId()).getState());
-        Assert.assertEquals(ExecutableState.DISCARDED, jobService.getOutput(task2.getId()).getState());
-    }
-
-
     @Test
     public void testSucceedAndError() throws Exception {
         DefaultChainedExecutable job = new DefaultChainedExecutable();
diff --git a/core-metadata/pom.xml b/core-metadata/pom.xml
index c95f7f00a3..b0193eb6ef 100644
--- a/core-metadata/pom.xml
+++ b/core-metadata/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-metadata</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
index 8c986722a6..f34c57c555 100644
--- a/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
+++ b/core-metadata/src/main/java/org/apache/kylin/measure/topn/TopNMeasureType.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.measure.topn;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
@@ -39,8 +40,8 @@
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.metadata.realization.CapabilityResult.CapabilityInfluence;
+import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.metadata.tuple.Tuple;
 import org.apache.kylin.metadata.tuple.TupleInfo;
 import org.slf4j.Logger;
@@ -142,8 +143,12 @@ public boolean isMemoryHungry() {
                 final ByteArray key = new ByteArray(keyLength);
                 int offset = 0;
                 for (int i = 0; i < dimensionEncodings.length; i++) {
-                    byte[] valueBytes = Bytes.toBytes(values[i + 1]);
-                    dimensionEncodings[i].encode(valueBytes, valueBytes.length, key.array(), offset);
+                    if (values[i + 1] == null) {
+                        Arrays.fill(key.array(), offset, offset + dimensionEncodings[i].getLengthOfEncoding(), DimensionEncoding.NULL);
+                    } else {
+                        byte[] valueBytes = Bytes.toBytes(values[i + 1]);
+                        dimensionEncodings[i].encode(valueBytes, valueBytes.length, key.array(), offset);
+                    }
                     offset += dimensionEncodings[i].getLengthOfEncoding();
                 }
 
@@ -307,15 +312,16 @@ public boolean needRewrite() {
 
     @Override
     public void adjustSqlDigest(List<MeasureDesc> measureDescs, SQLDigest sqlDigest) {
+        if (sqlDigest.aggregations.size() > 1) {
+            return;
+        }
+
         for (MeasureDesc measureDesc : measureDescs) {
             FunctionDesc topnFunc = measureDesc.getFunction();
             List<TblColRef> topnLiteralCol = getTopNLiteralColumn(topnFunc);
 
-            if (sqlDigest.groupbyColumns.containsAll(topnLiteralCol) == false)
-                return;
-
-            if (sqlDigest.aggregations.size() > 1) {
-                return;
+            if (sqlDigest.groupbyColumns.containsAll(topnLiteralCol) == false) {
+                continue;
             }
 
             if (sqlDigest.aggregations.size() > 0) {
@@ -324,12 +330,18 @@ public void adjustSqlDigest(List<MeasureDesc> measureDescs, SQLDigest sqlDigest)
                     logger.warn("When query with topN, only SUM/Count function is allowed.");
                     return;
                 }
+
+                if (isTopNCompatibleSum(measureDesc.getFunction(), origFunc) == false) {
+                    continue;
+                }
+
                 logger.info("Rewrite function " + origFunc + " to " + topnFunc);
             }
 
             sqlDigest.aggregations = Lists.newArrayList(topnFunc);
             sqlDigest.groupbyColumns.removeAll(topnLiteralCol);
             sqlDigest.metricColumns.addAll(topnLiteralCol);
+            break;
         }
     }
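
For context, the encoding loop above writes a NULL marker into the dimension's
fixed-length slot of the rowkey when the value is null, rather than passing null to
encode(). A self-contained illustration of that fill pattern; the marker byte and
slot lengths below are assumptions for the sketch, not DimensionEncoding internals:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class NullFillSketch {
        static final byte NULL_MARKER = (byte) 0xFF; // assumed marker value

        public static void main(String[] args) {
            String[] values = { null, "abc" };
            int[] lengths = { 4, 4 }; // fixed-length slots, as with dimension encodings
            byte[] key = new byte[8];
            int offset = 0;
            for (int i = 0; i < values.length; i++) {
                if (values[i] == null) {
                    Arrays.fill(key, offset, offset + lengths[i], NULL_MARKER);
                } else {
                    byte[] v = values[i].getBytes(StandardCharsets.UTF_8);
                    System.arraycopy(v, 0, key, offset, Math.min(v.length, lengths[i]));
                }
                offset += lengths[i]; // offsets advance the same either way
            }
            System.out.println(Arrays.toString(key));
        }
    }
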
 
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
index aaf9aa9bea..9074403923 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/tuple/Tuple.java
@@ -178,7 +178,7 @@ public static Object convertOptiqCellValue(String strValue, String dataTypeName)
         if (strValue == null)
             return null;
 
-        if ((strValue.equals("") || strValue.equals("\\N")) && !dataTypeName.equals("string"))
+        if ((strValue.equals("") || strValue.equals("\\N")) && !dataTypeName.equals("string") && !dataTypeName.startsWith("varchar"))
             return null;
 
         // TODO use data type enum instead of string comparison
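
For context, the patched guard treats varchar like string: the "" and "\N" markers
convert to SQL NULL only for non-character types. The rule in isolation (method
name hypothetical):

    public class NullMarkerRuleSketch {

        static boolean convertsToNull(String strValue, String dataTypeName) {
            boolean isNullMarker = strValue.equals("") || strValue.equals("\\N");
            boolean isCharType = dataTypeName.equals("string") || dataTypeName.startsWith("varchar");
            return isNullMarker && !isCharType;
        }

        public static void main(String[] args) {
            System.out.println(convertsToNull("\\N", "integer"));     // true
            System.out.println(convertsToNull("\\N", "varchar(20)")); // false after this patch
            System.out.println(convertsToNull("", "string"));         // false, as before
        }
    }
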
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/ISource.java b/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
index e9216f9bcf..5bff8a7c94 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/ISource.java
@@ -18,15 +18,18 @@
 
 package org.apache.kylin.source;
 
-import org.apache.kylin.metadata.model.TableDesc;
-
 import java.util.List;
 
+import org.apache.kylin.metadata.model.IBuildable;
+import org.apache.kylin.metadata.model.TableDesc;
+
 public interface ISource {
 
-    public <I> I adaptToBuildEngine(Class<I> engineInterface);
+    <I> I adaptToBuildEngine(Class<I> engineInterface);
+
+    ReadableTable createReadableTable(TableDesc tableDesc);
 
-    public ReadableTable createReadableTable(TableDesc tableDesc);
+    List<String> getMRDependentResources(TableDesc table);
 
-    public List<String> getMRDependentResources(TableDesc table);
+    SourcePartition parsePartitionBeforeBuild(IBuildable buildable, SourcePartition srcPartition);
 }
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java b/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
index e82c6ed08b..5ce9014874 100644
--- a/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
+++ b/core-metadata/src/main/java/org/apache/kylin/source/SourceFactory.java
@@ -31,7 +31,7 @@
     private static ImplementationSwitch<ISource> sources;
     static {
         Map<Integer, String> impls = KylinConfig.getInstanceFromEnv().getSourceEngines();
-        sources = new ImplementationSwitch<ISource>(impls, ISource.class);
+        sources = new ImplementationSwitch<>(impls, ISource.class);
     }
 
     public static ISource tableSource(ISourceAware aware) {
diff --git a/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java b/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
new file mode 100644
index 0000000000..e48970498e
--- /dev/null
+++ b/core-metadata/src/main/java/org/apache/kylin/source/SourcePartition.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.source;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.google.common.base.Objects;
+
+/**
+ */
+public class SourcePartition {
+    long startDate;
+    long endDate;
+    long startOffset;
+    long endOffset;
+    Map<Integer, Long> sourcePartitionOffsetStart;
+    Map<Integer, Long> sourcePartitionOffsetEnd;
+
+    public SourcePartition() {
+    }
+
+    public SourcePartition(long startDate, long endDate, long startOffset, long endOffset, Map<Integer, Long> sourcePartitionOffsetStart, Map<Integer, Long> sourcePartitionOffsetEnd) {
+        this.startDate = startDate;
+        this.endDate = endDate;
+        this.startOffset = startOffset;
+        this.endOffset = endOffset;
+        this.sourcePartitionOffsetStart = sourcePartitionOffsetStart;
+        this.sourcePartitionOffsetEnd = sourcePartitionOffsetEnd;
+    }
+
+    public long getStartDate() {
+        return startDate;
+    }
+
+    public void setStartDate(long startDate) {
+        this.startDate = startDate;
+    }
+
+    public long getEndDate() {
+        return endDate;
+    }
+
+    public void setEndDate(long endDate) {
+        this.endDate = endDate;
+    }
+
+    public long getStartOffset() {
+        return startOffset;
+    }
+
+    public void setStartOffset(long startOffset) {
+        this.startOffset = startOffset;
+    }
+
+    public long getEndOffset() {
+        return endOffset;
+    }
+
+    public void setEndOffset(long endOffset) {
+        this.endOffset = endOffset;
+    }
+
+    public Map<Integer, Long> getSourcePartitionOffsetStart() {
+        return sourcePartitionOffsetStart;
+    }
+
+    public void setSourcePartitionOffsetStart(Map<Integer, Long> sourcePartitionOffsetStart) {
+        this.sourcePartitionOffsetStart = sourcePartitionOffsetStart;
+    }
+
+    public Map<Integer, Long> getSourcePartitionOffsetEnd() {
+        return sourcePartitionOffsetEnd;
+    }
+
+    public void setSourcePartitionOffsetEnd(Map<Integer, Long> sourcePartitionOffsetEnd) {
+        this.sourcePartitionOffsetEnd = sourcePartitionOffsetEnd;
+    }
+
+    @Override
+    public String toString() {
+        return Objects.toStringHelper(this).add("startDate", startDate).add("endDate", endDate).add("startOffset", startOffset).add("endOffset", endOffset).add("sourcePartitionOffsetStart", sourcePartitionOffsetStart.toString()).add("sourcePartitionOffsetEnd", sourcePartitionOffsetEnd.toString()).toString();
+    }
+
+    public static SourcePartition getCopyOf(SourcePartition origin) {
+        SourcePartition copy = new SourcePartition();
+        copy.setStartDate(origin.getStartDate());
+        copy.setEndDate(origin.getEndDate());
+        copy.setStartOffset(origin.getStartOffset());
+        copy.setEndOffset(origin.getEndOffset());
+        if (origin.getSourcePartitionOffsetStart() != null) {
+            copy.setSourcePartitionOffsetStart(new HashMap<>(origin.getSourcePartitionOffsetStart()));
+        }
+        if (origin.getSourcePartitionOffsetEnd() != null) {
+            copy.setSourcePartitionOffsetEnd(new HashMap<>(origin.getSourcePartitionOffsetEnd()));
+        }
+        return copy;
+    }
+}
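
Note that getCopyOf() wraps the two offset maps in fresh HashMaps, so a caller can
mutate the copy without touching the original. A quick usage check, assuming
kylin-core-metadata on the classpath:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.kylin.source.SourcePartition;

    public class SourcePartitionCopySketch {
        public static void main(String[] args) {
            Map<Integer, Long> start = new HashMap<>();
            start.put(0, 100L);
            SourcePartition origin = new SourcePartition(0L, 1000L, 100L, 200L, start, new HashMap<Integer, Long>());
            SourcePartition copy = SourcePartition.getCopyOf(origin);
            copy.getSourcePartitionOffsetStart().put(0, 999L);
            // the original is unaffected by changes to the copy
            System.out.println(origin.getSourcePartitionOffsetStart().get(0)); // prints 100
        }
    }
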
diff --git a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
index 2883923186..3adec738b2 100644
--- a/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
+++ b/core-metadata/src/test/java/org/apache/kylin/measure/AggregatorMemEstimateTest.java
@@ -1,3 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.kylin.measure;
 
 import com.google.common.collect.Lists;
diff --git a/core-storage/pom.xml b/core-storage/pom.xml
index 2d9d618741..ede1dfde2a 100644
--- a/core-storage/pom.xml
+++ b/core-storage/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-core-storage</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
index cc3991883d..b338b3c73b 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/StorageContext.java
@@ -135,6 +135,7 @@ public void setFinalPushDownLimit(IRealization realization) {
                     tempPushDownLimit, pushDownLimitMax);
         } else {
             this.finalPushDownLimit = tempPushDownLimit;
+            logger.info("Enable limit: " + tempPushDownLimit);
         }
     }
 
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
index 6e19c091d4..c6a6daa083 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
@@ -53,6 +53,9 @@
 
     public CubeSegmentScanner(CubeSegment cubeSeg, Cuboid cuboid, Set<TblColRef> dimensions, Set<TblColRef> groups, //
             Collection<FunctionDesc> metrics, TupleFilter originalfilter, StorageContext context, String gtStorage) {
+        
+        logger.info("Init CubeSegmentScanner for segment {}", cubeSeg.getName());
+        
         this.cuboid = cuboid;
         this.cubeSeg = cubeSeg;
 
@@ -61,7 +64,7 @@ public CubeSegmentScanner(CubeSegment cubeSeg, Cuboid cuboid, Set<TblColRef> dim
         //is working on its own copy
         byte[] serialize = TupleFilterSerializer.serialize(originalfilter, StringCodeSystem.INSTANCE);
         TupleFilter filter = TupleFilterSerializer.deserialize(serialize, StringCodeSystem.INSTANCE);
-        
+
         // translate FunctionTupleFilter to IN clause
         ITupleFilterTransformer translator = new BuiltInFunctionTransformer(cubeSeg.getDimensionEncodingMap());
         filter = translator.transform(filter);
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
index fa23be1593..5e8af883e1 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/GTCubeStorageQueryBase.java
@@ -43,7 +43,6 @@
 import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
-import org.apache.kylin.metadata.model.PartitionDesc;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.SQLDigest;
@@ -103,46 +102,32 @@ public ITupleIterator search(StorageContext context, SQLDigest sqlDigest, TupleI
         dimensionsD.addAll(groupsD);
         dimensionsD.addAll(otherDimsD);
         Cuboid cuboid = Cuboid.identifyCuboid(cubeDesc, dimensionsD, metrics);
-        logger.info("Cuboid identified: cube={}, cuboidId={}", cubeInstance.getName(), cuboid.getId());
+        logger.info("Cuboid identified: cube={}, cuboidId={}, groupsD={}, otherDimsD={}", cubeInstance.getName(), cuboid.getId(), groupsD, otherDimsD);
         context.setCuboid(cuboid);
 
-        // isExactAggregation? meaning: tuples returned from storage requires no further aggregation in query engine
+        // set whether to aggr at storage
         Set<TblColRef> singleValuesD = findSingleValueColumns(filter);
-        boolean exactAggregation = isExactAggregation(cuboid, groups, otherDimsD, singleValuesD, derivedPostAggregation);
-        context.setExactAggregation(exactAggregation);
+        context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));
 
         // replace derived columns in filter with host columns; columns on loosened condition must be added to group by
-        TupleFilter filterD = translateDerived(filter, groupsD);
+        Set<TblColRef> loosenedColumnD = Sets.newHashSet();
+        TupleFilter filterD = translateDerived(filter, loosenedColumnD);
+        groupsD.addAll(loosenedColumnD);
 
-        //set whether to aggr at storage
-        context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));
         // set limit push down
-        enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, sqlDigest.aggregations, context);
-        context.setFinalPushDownLimit(cubeInstance);
+        enableStorageLimitIfPossible(cuboid, groups, derivedPostAggregation, groupsD, filter, loosenedColumnD, sqlDigest.aggregations, context);
         // set cautious threshold to prevent out of memory
         setThresholdIfNecessary(dimensionsD, metrics, context);
 
         List<CubeSegmentScanner> scanners = Lists.newArrayList();
         for (CubeSegment cubeSeg : cubeInstance.getSegments(SegmentStatusEnum.READY)) {
             CubeSegmentScanner scanner;
-            if (cubeSeg.getInputRecords() == 0) {
-                if (!skipZeroInputSegment(cubeSeg)) {
-                    logger.warn("cube segment {} input record is 0, " + "it may caused by kylin failed to the job counter " + "as the hadoop history server wasn't running", cubeSeg);
-                } else {
-                    logger.warn("cube segment {} input record is 0, skip it ", cubeSeg);
-                    continue;
-                }
-            }
-            try {
-                scanner = new CubeSegmentScanner(cubeSeg, cuboid, dimensionsD, groupsD, metrics, filterD, context, getGTStorage());
-            } catch (IllegalArgumentException ex) {
-                // ref KYLIN-1967, real empty segment can trigger dictionary exception -- IllegalArgumentException: Value not exists!
-                if (cubeSeg.getInputRecords() == 0) {
-                    logger.warn("cube segment {} input record is 0, skip it still", cubeSeg);
-                    continue;
-                }
-                throw ex;
+            if (cubeDesc.getConfig().isSkippingEmptySegments() && cubeSeg.getInputRecords() == 0) {
+                logger.info("Skip cube segment {} because its input record is 0", cubeSeg);
+                continue;
             }
+
+            scanner = new CubeSegmentScanner(cubeSeg, cuboid, dimensionsD, groupsD, metrics, filterD, context, getGTStorage());
             scanners.add(scanner);
         }
 
@@ -152,10 +137,6 @@ public ITupleIterator search(StorageContext context, SQLDigest sqlDigest, TupleI
         return new SequentialCubeTupleIterator(scanners, cuboid, dimensionsD, metrics, returnTupleInfo, context);
     }
 
-    protected boolean skipZeroInputSegment(CubeSegment cubeSegment) {
-        return false;
-    }
-
     protected abstract String getGTStorage();
 
     private void buildDimensionsAndMetrics(SQLDigest sqlDigest, Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics) {
@@ -262,45 +243,6 @@ public boolean isNeedStorageAggregation(Cuboid cuboid, Collection<TblColRef> gro
         }
     }
 
-    //exact aggregation was introduced back when we had some measures (like holistic distinct count) that is sensitive
-    //to post aggregation. Now that we don't have such measure any more, isExactAggregation should be useless (at least in v2 storage and above)
-    public boolean isExactAggregation(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> othersD, Set<TblColRef> singleValuesD, Set<TblColRef> derivedPostAggregation) {
-        boolean exact = true;
-
-        if (cuboid.requirePostAggregation()) {
-            exact = false;
-            logger.info("exactAggregation is false because cuboid " + cuboid.getInputID() + "=> " + cuboid.getId());
-        }
-
-        // derived aggregation is bad, unless expanded columns are already in group by
-        if (groups.containsAll(derivedPostAggregation) == false) {
-            exact = false;
-            logger.info("exactAggregation is false because derived column require post aggregation: " + derivedPostAggregation);
-        }
-
-        // other columns (from filter) is bad, unless they are ensured to have single value
-        if (singleValuesD.containsAll(othersD) == false) {
-            exact = false;
-            logger.info("exactAggregation is false because some column not on group by: " + othersD //
-                    + " (single value column: " + singleValuesD + ")");
-        }
-
-        // for partitioned cube, the partition column must belong to group by or has single value
-        PartitionDesc partDesc = cuboid.getCubeDesc().getModel().getPartitionDesc();
-        if (partDesc.isPartitioned()) {
-            TblColRef col = partDesc.getPartitionDateColumnRef();
-            if (!groups.contains(col) && !singleValuesD.contains(col)) {
-                exact = false;
-                logger.info("exactAggregation is false because cube is partitioned and " + col + " is not on group by");
-            }
-        }
-
-        if (exact) {
-            logger.info("exactAggregation is true, cuboid id is " + cuboid.getId());
-        }
-        return exact;
-    }
-
     @SuppressWarnings("unchecked")
     private TupleFilter translateDerived(TupleFilter filter, Set<TblColRef> collector) {
         if (filter == null)
@@ -363,26 +305,13 @@ private void collectColumnsRecursively(TupleFilter filter, Set<TblColRef> collec
             return;
 
         if (filter instanceof ColumnTupleFilter) {
-            collectColumns(((ColumnTupleFilter) filter).getColumn(), collector);
+            collector.add(((ColumnTupleFilter) filter).getColumn());
         }
         for (TupleFilter child : filter.getChildren()) {
             collectColumnsRecursively(child, collector);
         }
     }
 
-    private void collectColumns(TblColRef col, Set<TblColRef> collector) {
-        if (cubeDesc.isExtendedColumn(col)) {
-            throw new CubeDesc.CannotFilterExtendedColumnException(col);
-        }
-        if (cubeDesc.isDerived(col)) {
-            DeriveInfo hostInfo = cubeDesc.getHostInfo(col);
-            for (TblColRef h : hostInfo.columns)
-                collector.add(h);
-        } else {
-            collector.add(col);
-        }
-    }
-
     private void setThresholdIfNecessary(Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics, StorageContext context) {
         boolean hasMemHungryMeasure = false;
         for (FunctionDesc func : metrics) {
@@ -409,17 +338,20 @@ private void setThresholdIfNecessary(Collection<TblColRef> dimensions, Collectio
         }
     }
 
-    private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Collection<FunctionDesc> functionDescs, StorageContext context) {
+    private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> groups, Set<TblColRef> derivedPostAggregation, Collection<TblColRef> groupsD, TupleFilter filter, Set<TblColRef> loosenedColumnD, Collection<FunctionDesc> functionDescs, StorageContext context) {
         boolean possible = true;
 
-        boolean goodFilter = filter == null || TupleFilter.isEvaluableRecursively(filter);
-        if (!goodFilter) {
+        if (!TupleFilter.isEvaluableRecursively(filter)) {
+            possible = false;
+            logger.info("Storage limit push down is impossible because the filter isn't evaluable");
+        }
+
+        if (!loosenedColumnD.isEmpty()) { // KYLIN-2173
             possible = false;
-            logger.info("Storage limit push down is impossible because the filter is unevaluatable");
+            logger.info("Storage limit push down is impossible because filter is loosened: " + loosenedColumnD);
         }
 
-        boolean goodSort = !context.hasSort();
-        if (!goodSort) {
+        if (context.hasSort()) {
             possible = false;
             logger.info("Storage limit push down is impossible because the query has order by");
         }
@@ -447,8 +379,8 @@ private void enableStorageLimitIfPossible(Cuboid cuboid, Collection<TblColRef> g
         }
 
         if (possible) {
-            logger.info("Enable limit " + context.getLimit());
             context.enableLimit();
+            context.setFinalPushDownLimit(cubeInstance);
         }
     }
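
For context, enableStorageLimitIfPossible keeps the accumulate-then-decide shape:
each disqualifier flips a single flag and logs its reason, and the limit is enabled
(and the final pushdown limit set) only if none fired. The gating pattern in
isolation, with hypothetical names:

    public class LimitPushdownGateSketch {

        static boolean canPushDownLimit(boolean filterEvaluable, boolean filterLoosened, boolean hasSort) {
            boolean possible = true;
            if (!filterEvaluable)
                possible = false; // filter must be fully evaluable in storage
            if (filterLoosened)
                possible = false; // KYLIN-2173: a loosened filter returns extra rows
            if (hasSort)
                possible = false; // ORDER BY needs all rows before limiting
            return possible;
        }

        public static void main(String[] args) {
            System.out.println(canPushDownLimit(true, false, false)); // true
            System.out.println(canPushDownLimit(true, true, false));  // false
        }
    }
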
 
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
index 61267ae16f..00ba247d51 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
@@ -37,8 +37,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-
 public class SegmentCubeTupleIterator implements ITupleIterator {
 
     private static final Logger logger = LoggerFactory.getLogger(SegmentCubeTupleIterator.class);
@@ -98,8 +96,6 @@ public boolean hasNext() {
         }
         GTRecord curRecord = gtItr.next();
 
-        Preconditions.checkNotNull(cubeTupleConverter);
-
         // translate into tuple
         advMeasureFillers = cubeTupleConverter.translateResult(curRecord, tuple);
 
diff --git a/dev-support/sync_hbase_cdh_branches.sh b/dev-support/sync_hbase_cdh_branches.sh
index f52284a367..472f402c61 100644
--- a/dev-support/sync_hbase_cdh_branches.sh
+++ b/dev-support/sync_hbase_cdh_branches.sh
@@ -1,5 +1,22 @@
 #!/bin/bash
 
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
 # ============================================================================
 
 base=master
diff --git a/dev-support/test_all_against_hdp_2_2_4_2_2.sh b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
old mode 100644
new mode 100755
diff --git a/engine-mr/pom.xml b/engine-mr/pom.xml
index a74011806c..47e7301d4c 100644
--- a/engine-mr/pom.xml
+++ b/engine-mr/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-engine-mr</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
index 159e5cb3c9..47eb9c36b8 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
@@ -95,12 +95,14 @@ public HadoopShellExecutable createBuildDictionaryStep(String jobId) {
     public UpdateCubeInfoAfterBuildStep createUpdateCubeInfoAfterBuildStep(String jobId) {
         final UpdateCubeInfoAfterBuildStep result = new UpdateCubeInfoAfterBuildStep();
         result.setName(ExecutableConstants.STEP_NAME_UPDATE_CUBE_INFO);
+        result.getParams().put(BatchConstants.CFG_OUTPUT_PATH, getFactDistinctColumnsPath(jobId));
 
         CubingExecutableUtil.setCubeName(seg.getRealization().getName(), result.getParams());
         CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
         CubingExecutableUtil.setCubingJobId(jobId, result.getParams());
         CubingExecutableUtil.setIndexPath(this.getSecondaryIndexPath(jobId), result.getParams());
 
+
         return result;
     }
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index a5b2d2e334..1fd163118c 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -61,6 +61,7 @@
 import org.apache.kylin.common.util.CliCommandExecutor;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.StringSplitter;
+import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.HadoopUtil;
@@ -95,8 +96,6 @@
 
     private static final String MAP_REDUCE_CLASSPATH = "mapreduce.application.classpath";
 
-    private static final String KYLIN_HIVE_DEPENDENCY_JARS = "[^,]*hive-exec[0-9.-]+[^,]*?\\.jar" + "|" + "[^,]*hive-metastore[0-9.-]+[^,]*?\\.jar" + "|" + "[^,]*hive-hcatalog-core[0-9.-]+[^,]*?\\.jar";
-
     protected static void runJob(Tool job, String[] args) {
         try {
             int exitCode = ToolRunner.run(job, args);
@@ -199,27 +198,26 @@ protected void setJobClasspath(Job job, KylinConfig kylinConf) {
             kylinHiveDependency = kylinHiveDependency.replace(":", ",");
 
             logger.info("Hive Dependencies Before Filtered: " + kylinHiveDependency);
-            String filteredHive = filterKylinHiveDependency(kylinHiveDependency);
+            String filteredHive = filterKylinHiveDependency(kylinHiveDependency, kylinConf);
             logger.info("Hive Dependencies After Filtered: " + filteredHive);
 
-            if (kylinDependency.length() > 0)
-                kylinDependency.append(",");
-            kylinDependency.append(filteredHive);
+            StringUtil.appendWithSeparator(kylinDependency, filteredHive);
         } else {
 
-            logger.info("No hive dependency jars set in the environment, will find them from jvm:");
+            logger.info("No hive dependency jars set in the environment, will find them from classpath:");
 
             try {
                 String hiveExecJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.ql.Driver"));
-                kylinDependency.append(hiveExecJarPath).append(",");
+
+                StringUtil.appendWithSeparator(kylinDependency, hiveExecJarPath);
                 logger.info("hive-exec jar file: " + hiveExecJarPath);
 
                 String hiveHCatJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hive.hcatalog.mapreduce.HCatInputFormat"));
-                kylinDependency.append(hiveHCatJarPath).append(",");
+                StringUtil.appendWithSeparator(kylinDependency, hiveHCatJarPath);
                 logger.info("hive-catalog jar file: " + hiveHCatJarPath);
 
                 String hiveMetaStoreJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.hadoop.hive.metastore.api.Table"));
-                kylinDependency.append(hiveMetaStoreJarPath).append(",");
+                StringUtil.appendWithSeparator(kylinDependency, hiveMetaStoreJarPath);
                 logger.info("hive-metastore jar file: " + hiveMetaStoreJarPath);
             } catch (ClassNotFoundException e) {
                 logger.error("Cannot found hive dependency jars: " + e);
@@ -229,34 +227,23 @@ protected void setJobClasspath(Job job, KylinConfig kylinConf) {
         // for kafka dependencies
         if (kylinKafkaDependency != null) {
             kylinKafkaDependency = kylinKafkaDependency.replace(":", ",");
-
-            logger.info("Kafka Dependencies Before Filtered: " + kylinKafkaDependency);
-
-            if (kylinDependency.length() > 0)
-                kylinDependency.append(",");
-            kylinDependency.append(kylinKafkaDependency);
+            logger.info("Kafka Dependencies: " + kylinKafkaDependency);
+            StringUtil.appendWithSeparator(kylinDependency, kylinKafkaDependency);
         } else {
-
-            logger.info("No Kafka dependency jars set in the environment, will find them from jvm:");
-
+            logger.info("No Kafka dependency jar set in the environment, will find them from classpath:");
             try {
                 String kafkaClientJarPath = ClassUtil.findContainingJar(Class.forName("org.apache.kafka.clients.consumer.KafkaConsumer"));
-                kylinDependency.append(kafkaClientJarPath).append(",");
+                StringUtil.appendWithSeparator(kylinDependency, kafkaClientJarPath);
                 logger.info("kafka jar file: " + kafkaClientJarPath);
 
             } catch (ClassNotFoundException e) {
-                logger.error("Cannot found kafka dependency jars: " + e);
+                logger.warn("Not found kafka client jar from classpath, it is optional for normal build: " + e);
             }
         }
 
         // for KylinJobMRLibDir
         String mrLibDir = kylinConf.getKylinJobMRLibDir();
-        if (!StringUtils.isBlank(mrLibDir)) {
-            if (kylinDependency.length() > 0) {
-                kylinDependency.append(",");
-            }
-            kylinDependency.append(mrLibDir);
-        }
+        StringUtil.appendWithSeparator(kylinDependency, mrLibDir);
 
         setJobTmpJarsAndFiles(job, kylinDependency.toString());
 
@@ -269,13 +256,13 @@ private void overrideJobConfig(Configuration jobConf, Map<String, String> overri
         }
     }
 
-    private String filterKylinHiveDependency(String kylinHiveDependency) {
+    private String filterKylinHiveDependency(String kylinHiveDependency, KylinConfig config) {
         if (StringUtils.isBlank(kylinHiveDependency))
             return "";
 
         StringBuilder jarList = new StringBuilder();
 
-        Pattern hivePattern = Pattern.compile(KYLIN_HIVE_DEPENDENCY_JARS);
+        Pattern hivePattern = Pattern.compile(config.getHiveDependencyFilterList());
         Matcher matcher = hivePattern.matcher(kylinHiveDependency);
 
         while (matcher.find()) {
@@ -308,9 +295,9 @@ private void setJobTmpJarsAndFiles(Job job, String kylinDependency) {
                     continue;
                 }
                 FileSystem fs;
-                if (hdfs.exists(p)) {
+                if (exists(hdfs, p)) {
                     fs = hdfs;
-                } else if (localfs.exists(p)) {
+                } else if (exists(localfs, p)) {
                     fs = localfs;
                 } else {
                     logger.warn("The directory of kylin dependency '" + fileName + "' does not exist, skip");
@@ -399,6 +386,15 @@ private String getDefaultMapRedClasspath() {
         return classpath;
     }
 
+    private static boolean exists(FileSystem fs, Path p) throws IOException {
+        try {
+            return fs.exists(p);
+        } catch (IllegalArgumentException ex) {
+            // can happen when FS mismatch
+            return false;
+        }
+    }
+
     public static int addInputDirs(String input, Job job) throws IOException {
         int folderNum = addInputDirs(StringSplitter.split(input, ","), job);
         logger.info("Number of added folders:" + folderNum);
@@ -414,7 +410,7 @@ public static int addInputDirs(String[] inputs, Job job) throws IOException {
                 FileSystem fs = FileSystem.get(job.getConfiguration());
                 Path path = new Path(inp);
 
-                if (!fs.exists(path)) {
+                if (!exists(fs, path)) {
                     logger.warn("Path not exist:" + path.toString());
                     continue;
                 }
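
For context, the new exists() helper is needed because FileSystem.exists throws
IllegalArgumentException when the path's scheme does not belong to that filesystem.
A small reproduction, assuming hadoop-common on the classpath; the namenode host is
made up:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WrongFsSketch {
        public static void main(String[] args) throws Exception {
            FileSystem localfs = FileSystem.getLocal(new Configuration());
            Path p = new Path("hdfs://namenode:8020/tmp/some.jar"); // hypothetical HDFS path
            try {
                localfs.exists(p);
            } catch (IllegalArgumentException ex) {
                // "Wrong FS ... expected: file:///" -- the wrapper above returns false instead
                System.out.println(ex.getMessage());
            }
        }
    }
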
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusGetter.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusGetter.java
index 619de90dad..7dcb73ee53 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusGetter.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/HadoopStatusGetter.java
@@ -55,7 +55,7 @@
     private final String mrJobId;
     private final String yarnUrl;
 
-    protected static final Logger logger = LoggerFactory.getLogger(HadoopStatusChecker.class);
+    protected static final Logger logger = LoggerFactory.getLogger(HadoopStatusGetter.class);
 
     public HadoopStatusGetter(String yarnUrl, String mrJobId) {
         this.yarnUrl = yarnUrl;
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
index 4f0d3fdcd2..38595a0e60 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/BaseCuboidMapperBase.java
@@ -195,8 +195,6 @@ private String getCell(int i, SplittedBytes[] splitBuffers) {
     }
 
     protected void outputKV(Context context) throws IOException, InterruptedException {
-        intermediateTableDesc.sanityCheck(bytesSplitter);
-
         byte[] rowKey = buildKey(bytesSplitter.getSplitBuffers());
         outputKey.set(rowKey, 0, rowKey.length);
 
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
index 2889ba8288..97d38291ec 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsReducer.java
@@ -130,14 +130,6 @@ public void reduce(Text key, Iterable<Text> values, Context context) throws IOEx
                     cuboidHLLMap.put(cuboidId, hll);
                 }
             }
-        } else if (isPartitionCol == true) {
-            // for partition col min/max value
-            ByteArray value = new ByteArray(Bytes.copy(key.getBytes(), 1, key.getLength() - 1));
-            if (colValues.size() > 1) {
-                colValues.set(1, value);
-            } else {
-                colValues.add(value);
-            }
         } else {
             colValues.add(new ByteArray(Bytes.copy(key.getBytes(), 1, key.getLength() - 1)));
             if (colValues.size() == 1000000) { //spill every 1 million
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
index d6435b7cff..d6302b53ca 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/UpdateCubeInfoAfterBuildStep.java
@@ -18,16 +18,29 @@
 
 package org.apache.kylin.engine.mr.steps;
 
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.time.FastDateFormat;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.CubingJob;
+import org.apache.kylin.engine.mr.HadoopUtil;
+import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -59,6 +72,10 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
         segment.setInputRecordsSize(sourceSizeBytes);
 
         try {
+            if (segment.isSourceOffsetsOn()) {
+                updateTimeRange(segment);
+            }
+
             cubeManager.promoteNewlyBuiltSegments(cube, segment);
             return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
         } catch (IOException e) {
@@ -67,4 +84,51 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
         }
     }
 
+    private void updateTimeRange(CubeSegment segment) throws IOException {
+        final TblColRef partitionCol = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
+        final String outputPath = this.getParams().get(BatchConstants.CFG_OUTPUT_PATH);
+        final Path outputFile = new Path(outputPath, partitionCol.getName());
+
+        final DataType partitionColType = partitionCol.getType();
+        final FastDateFormat dateFormat;
+        if (partitionColType.isDate()) {
+            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATE_PATTERN);
+        } else if (partitionColType.isDatetime() || partitionColType.isTimestamp()) {
+            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATETIME_PATTERN_WITHOUT_MILLISECONDS);
+        } else if (partitionColType.isStringFamily()) {
+            String partitionDateFormat = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateFormat();
+            if (StringUtils.isEmpty(partitionDateFormat)) {
+                partitionDateFormat = DateFormat.DEFAULT_DATE_PATTERN;
+            }
+            dateFormat = DateFormat.getDateFormat(partitionDateFormat);
+        } else {
+            throw new IllegalStateException("Type " + partitionColType + " is not a valid partition column type");
+        }
+
+        long minValue = Long.MAX_VALUE, maxValue = Long.MIN_VALUE;
+        String currentValue;
+        FSDataInputStream inputStream = null;
+        BufferedReader bufferedReader = null;
+        try {
+            FileSystem fs = HadoopUtil.getFileSystem(outputPath);
+            inputStream = fs.open(outputFile);
+            bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
+            currentValue = bufferedReader.readLine();
+            while (currentValue != null) {
+                long time = dateFormat.parse(currentValue).getTime();
+                minValue = Math.min(time, minValue);
+                maxValue = Math.max(time, maxValue);
+                currentValue = bufferedReader.readLine();
+            }
+        } catch (Exception e) {
+            throw new IOException(e);
+        } finally {
+            IOUtils.closeQuietly(bufferedReader);
+            IOUtils.closeQuietly(inputStream);
+        }
+
+        segment.setDateRangeStart(minValue);
+        segment.setDateRangeEnd(maxValue);
+    }
+
 }
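
For context, updateTimeRange derives the segment's date range by scanning the
distinct values of the partition column written by the fact-distinct step. The same
min/max scan in isolation; the input literal and date pattern are made up:

    import java.io.BufferedReader;
    import java.io.StringReader;
    import java.text.SimpleDateFormat;

    public class TimeRangeScanSketch {
        public static void main(String[] args) throws Exception {
            // stand-in for the partition column's distinct-value file on HDFS
            String lines = "2016-11-01\n2016-10-28\n2016-11-03\n";
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
            long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
            BufferedReader reader = new BufferedReader(new StringReader(lines));
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                long t = fmt.parse(line).getTime();
                min = Math.min(min, t);
                max = Math.max(max, t);
            }
            reader.close();
            System.out.println(min + " .. " + max); // becomes the segment's date range
        }
    }
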
diff --git a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MockupMapContext.java b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MockupMapContext.java
index 847071d043..99004655da 100644
--- a/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MockupMapContext.java
+++ b/engine-mr/src/test/java/org/apache/kylin/engine/mr/steps/MockupMapContext.java
@@ -77,6 +77,7 @@ public void write(Object key, Object value) throws IOException, InterruptedExcep
                     outKV[0] = key;
                     outKV[1] = value;
                 }
+
             }
 
             @Override
@@ -99,6 +100,7 @@ public String getStatus() {
                 throw new NotImplementedException();
             }
 
+
             @Override
             public float getProgress() {
                 throw new NotImplementedException();
@@ -195,17 +197,17 @@ public String getJobName() {
             }
 
             @Override
-            public RawComparator<?> getSortComparator() {
+            public boolean userClassesTakesPrecedence() {
                 throw new NotImplementedException();
             }
 
             @Override
-            public String getJar() {
+            public RawComparator<?> getSortComparator() {
                 throw new NotImplementedException();
             }
 
             @Override
-            public RawComparator<?> getGroupingComparator() {
+            public String getJar() {
                 throw new NotImplementedException();
             }
 
@@ -221,7 +223,7 @@ public boolean getTaskCleanupNeeded() {
 
             @Override
             public boolean getProfileEnabled() {
-                throw new NotImplementedException();
+                return false;
             }
 
             @Override
@@ -308,6 +310,11 @@ public InputSplit getInputSplit() {
             public RawComparator<?> getCombinerKeyGroupingComparator() {
                 throw new NotImplementedException();
             }
+
+            @Override
+            public RawComparator<?> getGroupingComparator() {
+                return null;
+            }
         });
     }
 }
diff --git a/engine-spark/pom.xml b/engine-spark/pom.xml
index 397e06b52c..3dfd99e7a8 100644
--- a/engine-spark/pom.xml
+++ b/engine-spark/pom.xml
@@ -17,13 +17,12 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
index 9b690637b8..f96da7a9b3 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
@@ -544,7 +544,7 @@ private void bulkLoadHFile(String cubeName, String segmentId, String hfileLocati
         kyroClasses.add(Object[].class);
         kyroClasses.add(org.apache.spark.sql.types.StringType$.class);
         kyroClasses.add(Hashing.murmur3_128().getClass());
-        kyroClasses.add(org.apache.spark.sql.columnar.CachedBatch.class);
+        kyroClasses.add(org.apache.spark.sql.execution.columnar.CachedBatch.class);
         kyroClasses.add(byte[][].class);
         kyroClasses.add(org.apache.spark.sql.types.Decimal.class);
         kyroClasses.add(scala.math.BigDecimal.class);
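
Note on the SparkCubing.java change: Spark relocated CachedBatch from org.apache.spark.sql.columnar to org.apache.spark.sql.execution.columnar in the 1.6 line, so the Kryo registration list must use the new fully-qualified name. Because the class is resolved by name on the driver at registration time, a stale package fails fast. A self-contained check (hypothetical class name, assumes Spark 1.6.x jars on the classpath):

    public class CachedBatchNameCheck {
        public static void main(String[] args) throws ClassNotFoundException {
            // Throws ClassNotFoundException on Spark 1.6 if the pre-1.6 name
            // org.apache.spark.sql.columnar.CachedBatch is used instead.
            Class.forName("org.apache.spark.sql.execution.columnar.CachedBatch");
            System.out.println("CachedBatch resolved at its Spark 1.6 location");
        }
    }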
diff --git a/examples/test_case_data/localmeta/cube_desc/ut_cube_desc_combination_int_overflow.json b/examples/test_case_data/localmeta/cube_desc/ut_cube_desc_combination_int_overflow.json
new file mode 100644
index 0000000000..e8e2a52874
--- /dev/null
+++ b/examples/test_case_data/localmeta/cube_desc/ut_cube_desc_combination_int_overflow.json
@@ -0,0 +1,398 @@
+{
+  "uuid" : "9e89e128-f13e-4209-82d2-973985114793",
+  "last_modified" : 1479207711845,
+  "name" : "ut_cube_desc_combination_int_overflow",
+  "model_name" : "ut_large_dimension_number",
+  "description" : "",
+  "null_string" : null,
+  "dimensions" : [ {
+    "name" : "DEFAULT.WIDE_TABLE.A",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "A",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.B",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "B",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.C",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "C",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.D",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "D",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.E",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "E",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.F",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "F",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.G",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "G",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.H",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "H",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.I",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "I",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.J",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "J",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.K",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "K",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.L",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "L",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.M",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "M",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.N",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "N",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.O",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "O",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.P",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "P",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Q",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Q",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.R",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "R",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.S",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "S",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.T",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "T",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.U",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "U",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.V",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "V",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.W",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "W",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.X",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "X",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Y",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Y",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Z",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Z",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AA",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AA",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AB",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AB",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AC",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AC",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AD",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AD",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AE",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AE",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AF",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AF",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AG",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AG",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AH",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AH",
+    "derived" : null
+  } ],
+  "measures" : [ {
+    "name" : "_COUNT_",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
+      },
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "dictionaries" : [ ],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "A",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "B",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "C",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "D",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "E",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "F",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "G",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "H",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "I",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "J",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "K",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "L",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "M",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "N",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "O",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "P",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Q",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "R",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "S",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "T",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "U",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "V",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "W",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "X",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Y",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Z",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AA",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AB",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AC",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AD",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AE",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AF",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AG",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AH",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }]
+  },
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "F1",
+      "columns" : [ {
+        "qualifier" : "M",
+        "measure_refs" : [ "_COUNT_" ]
+      } ]
+    } ]
+  },
+  "aggregation_groups" : [ {
+    "includes" : [ "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "AA", "AB", "AC", "AD", "AE", "AF", "AG", "AH"],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ "A" ],
+      "joint_dims" : [ ["L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "AA", "AB", "AC", "AD", "AE", "AF", "AG", "AH"] ]
+    }
+  }],
+  "signature" : "4NqJVcVIYRF5PkJ0jS+9Rg==",
+  "notify_list" : [ ],
+  "status_need_notify" : [ "ERROR", "DISCARDED", "SUCCEED" ],
+  "partition_date_start" : 1469923200000,
+  "partition_date_end" : 3153600000000,
+  "auto_merge_time_ranges" : [ 604800000, 2419200000 ],
+  "retention_range" : 0,
+  "engine_type" : 100,
+  "storage_type" : 100,
+  "override_kylin_properties" : {
+    "kylin.cube.algorithm" : "auto",
+    "kylin.cube.aggrgroup.max.combination" : "4096",
+    "kylin.job.cubing.inmem.sampling.percent" : "100",
+    "kylin.hbase.default.compression.codec" : "none"
+  }
+}
\ No newline at end of file
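
Note on the fixture above: its name points at combination-count arithmetic. On our reading, the aggregation group includes 34 dimensions, so even with "A" mandatory a naive count is 2^33 = 8,589,934,592 cuboids, which exceeds Integer.MAX_VALUE (2,147,483,647) and overflows a 32-bit counter. The 23-column joint rule collapses L..AH into a single unit, leaving 10 free dimensions plus that unit: 2^11 = 2048 combinations, within the 4096 cap set in the fixture's override_kylin_properties. The arithmetic, as a sketch:

    public class CombinationOverflowSketch {
        public static void main(String[] args) {
            long naive = 1L << 33;           // "A" mandatory leaves 33 free dims -> 8,589,934,592
            System.out.println(naive > Integer.MAX_VALUE); // true: a 32-bit count overflows
            long withJoint = 1L << (10 + 1); // the 23-dim joint block counts as one unit
            System.out.println(withJoint);   // 2048, inside the configured cap of 4096
        }
    }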
diff --git a/examples/test_case_data/localmeta/cube_desc/ut_large_dimension_number.json b/examples/test_case_data/localmeta/cube_desc/ut_large_dimension_number.json
new file mode 100644
index 0000000000..c0e85680d7
--- /dev/null
+++ b/examples/test_case_data/localmeta/cube_desc/ut_large_dimension_number.json
@@ -0,0 +1,585 @@
+{
+  "uuid" : "9e89e128-f13e-4209-82d2-973985114793",
+  "last_modified" : 1479207711845,
+  "name" : "ut_large_dimension_number",
+  "model_name" : "ut_large_dimension_number",
+  "description" : "",
+  "null_string" : null,
+  "dimensions" : [ {
+    "name" : "DEFAULT.WIDE_TABLE.A",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "A",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.B",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "B",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.C",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "C",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.D",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "D",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.E",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "E",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.F",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "F",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.G",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "G",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.H",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "H",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.I",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "I",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.J",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "J",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.K",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "K",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.L",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "L",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.M",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "M",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.N",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "N",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.O",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "O",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.P",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "P",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Q",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Q",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.R",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "R",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.S",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "S",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.T",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "T",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.U",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "U",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.V",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "V",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.W",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "W",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.X",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "X",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Y",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Y",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.Z",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "Z",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AA",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AA",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AB",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AB",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AC",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AC",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AD",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AD",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AE",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AE",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AF",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AF",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AG",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AG",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AH",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AH",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AI",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AI",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AJ",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AJ",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AK",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AK",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AL",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AL",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AM",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AM",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AN",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AN",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AO",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AO",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AP",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AP",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AQ",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AQ",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AR",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AR",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AS",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AS",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AT",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AT",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AU",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AU",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AV",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AV",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AW",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AW",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AX",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AX",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AY",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AY",
+    "derived" : null
+  },{
+    "name" : "DEFAULT.WIDE_TABLE.AZ",
+    "table" : "DEFAULT.WIDE_TABLE",
+    "column" : "AZ",
+    "derived" : null
+  } ],
+  "measures" : [ {
+    "name" : "_COUNT_",
+    "function" : {
+      "expression" : "COUNT",
+      "parameter" : {
+        "type" : "constant",
+        "value" : "1",
+        "next_parameter" : null
+      },
+      "returntype" : "bigint"
+    },
+    "dependent_measure_ref" : null
+  } ],
+  "dictionaries" : [ ],
+  "rowkey" : {
+    "rowkey_columns" : [ {
+      "column" : "A",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "B",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "C",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "D",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "E",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "F",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "G",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "H",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "I",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "J",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "K",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "L",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "M",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "N",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "O",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "P",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Q",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "R",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "S",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "T",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "U",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "V",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "W",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "X",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Y",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "Z",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AA",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AB",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AC",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AD",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AE",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AF",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AG",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AH",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AI",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AJ",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AK",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AL",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AM",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AN",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AO",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AP",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AQ",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AR",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AS",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AT",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AU",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AV",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AW",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AX",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AY",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    }, {
+      "column" : "AZ",
+      "encoding" : "dict",
+      "isShardBy" : false,
+      "index" : "eq"
+    } ]
+  },
+  "hbase_mapping" : {
+    "column_family" : [ {
+      "name" : "F1",
+      "columns" : [ {
+        "qualifier" : "M",
+        "measure_refs" : [ "_COUNT_" ]
+      } ]
+    } ]
+  },
+  "aggregation_groups" : [ {
+    "includes" : [ "A", "B", "C", "D", "E" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ ],
+      "joint_dims" : [ ]
+    }
+  }, {
+    "includes" : [ "F", "G", "H", "I", "J", "K", "L", "M" ],
+    "select_rule" : {
+      "hierarchy_dims" : [ ],
+      "mandatory_dims" : [ "F" ],
+      "joint_dims" : [ ]
+    }
+  } ],
+  "signature" : "4NqJVcVIYRF5PkJ0jS+9Rg==",
+  "notify_list" : [ ],
+  "status_need_notify" : [ "ERROR", "DISCARDED", "SUCCEED" ],
+  "partition_date_start" : 1469923200000,
+  "partition_date_end" : 3153600000000,
+  "auto_merge_time_ranges" : [ 604800000, 2419200000 ],
+  "retention_range" : 0,
+  "engine_type" : 100,
+  "storage_type" : 100,
+  "override_kylin_properties" : {
+    "kylin.cube.algorithm" : "auto",
+    "kylin.cube.aggrgroup.max.combination" : "4096",
+    "kylin.job.cubing.inmem.sampling.percent" : "100",
+    "kylin.hbase.default.compression.codec" : "none"
+  }
+}
\ No newline at end of file
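
Note: the companion fixture above has 52 dimensions (A..AZ) but deliberately small aggregation groups, so its combination count stays tiny: the first group contributes at most 2^5 = 32 cuboids, the second (8 dimensions with "F" mandatory) at most 2^7 = 128, both far under the 4096 cap. The test therefore appears to exercise a large dimension count rather than a large combination count. Upper bounds, as a sketch:

    public class PerGroupBoundSketch {
        public static void main(String[] args) {
            System.out.println(1 << 5); // 32  : group {A..E}, no rules
            System.out.println(1 << 7); // 128 : group {F..M} with "F" mandatory
        }
    }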
diff --git a/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json b/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
new file mode 100644
index 0000000000..38b070ec27
--- /dev/null
+++ b/examples/test_case_data/localmeta/model_desc/ut_large_dimension_number.json
@@ -0,0 +1,26 @@
+{
+  "uuid" : "dd250ea4-27f7-4450-b604-fcfdf3578f5a",
+  "last_modified" : 1479206338080,
+  "version" : "1.6.0",
+  "name" : "ut_large_dimension_number",
+  "owner" : "ADMIN",
+  "description" : "",
+  "fact_table" : "DEFAULT.WIDE_TABLE",
+  "lookups" : [],
+  "dimensions" : [ {
+    "table" : "DEFAULT.WIDE_TABLE",
+    "columns" : ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "AA", "AB", "AC", "AD", "AE", "AF", "AG", "AH", "AI", "AJ", "AK", "AL", "AM", "AN", "AO", "AP", "AQ", "AR", "AS", "AT", "AU", "AV", "AW", "AX", "AY", "AZ"]
+  }],
+  "metrics" : [ "A" ],
+  "filter_condition" : "",
+  "partition_desc" : {
+    "partition_date_column" : "DEFAULT.WIDE_TABLE.A",
+    "partition_time_column" : null,
+    "partition_date_start" : 0,
+    "partition_date_format" : "yyyyMMdd",
+    "partition_time_format" : "HH:mm:ss",
+    "partition_type" : "APPEND",
+    "partition_condition_builder" : "org.apache.kylin.metadata.model.PartitionDesc$DefaultPartitionConditionBuilder"
+  },
+  "capacity" : "MEDIUM"
+}
\ No newline at end of file
diff --git a/examples/test_case_data/localmeta/table/DEFAULT.WIDE_TABLE.json b/examples/test_case_data/localmeta/table/DEFAULT.WIDE_TABLE.json
new file mode 100644
index 0000000000..ef142d18b9
--- /dev/null
+++ b/examples/test_case_data/localmeta/table/DEFAULT.WIDE_TABLE.json
@@ -0,0 +1,217 @@
+{
+  "uuid" : "cd267e5b-6835-4db2-a861-8b78f38271fe",
+  "last_modified" : 1479202699684,
+  "version" : "1.6.0",
+  "name" : "WIDE_TABLE",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "A",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "2",
+    "name" : "B",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "3",
+    "name" : "C",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "4",
+    "name" : "D",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "5",
+    "name" : "E",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "6",
+    "name" : "F",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "7",
+    "name" : "G",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "8",
+    "name" : "H",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "9",
+    "name" : "I",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "10",
+    "name" : "J",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "11",
+    "name" : "K",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "12",
+    "name" : "L",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "13",
+    "name" : "M",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "14",
+    "name" : "N",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "15",
+    "name" : "O",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "16",
+    "name" : "P",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "17",
+    "name" : "Q",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "18",
+    "name" : "R",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "19",
+    "name" : "S",
+    "datatype" : "bigint"
+  }, {
+    "id" : "20",
+    "name" : "T",
+    "datatype" : "bigint"
+  }, {
+    "id" : "21",
+    "name" : "U",
+    "datatype" : "bigint"
+  }, {
+    "id" : "22",
+    "name" : "V",
+    "datatype" : "bigint"
+  }, {
+    "id" : "23",
+    "name" : "W",
+    "datatype" : "bigint"
+  }, {
+    "id" : "24",
+    "name" : "X",
+    "datatype" : "bigint"
+  }, {
+    "id" : "25",
+    "name" : "Y",
+    "datatype" : "bigint"
+  }, {
+    "id" : "26",
+    "name" : "Z",
+    "datatype" : "bigint"
+  }, {
+    "id" : "27",
+    "name" : "AA",
+    "datatype" : "bigint"
+  }, {
+    "id" : "28",
+    "name" : "AB",
+    "datatype" : "bigint"
+  }, {
+    "id" : "29",
+    "name" : "AC",
+    "datatype" : "bigint"
+  }, {
+    "id" : "30",
+    "name" : "AD",
+    "datatype" : "bigint"
+  }, {
+    "id" : "31",
+    "name" : "AE",
+    "datatype" : "bigint"
+  }, {
+    "id" : "32",
+    "name" : "AF",
+    "datatype" : "bigint"
+  }, {
+    "id" : "33",
+    "name" : "AG",
+    "datatype" : "bigint"
+  }, {
+    "id" : "34",
+    "name" : "AH",
+    "datatype" : "bigint"
+  }, {
+    "id" : "35",
+    "name" : "AI",
+    "datatype" : "bigint"
+  }, {
+    "id" : "36",
+    "name" : "AJ",
+    "datatype" : "bigint"
+  }, {
+    "id" : "37",
+    "name" : "AK",
+    "datatype" : "bigint"
+  }, {
+    "id" : "38",
+    "name" : "AL",
+    "datatype" : "bigint"
+  }, {
+    "id" : "39",
+    "name" : "AM",
+    "datatype" : "bigint"
+  }, {
+    "id" : "40",
+    "name" : "AN",
+    "datatype" : "bigint"
+  }, {
+    "id" : "41",
+    "name" : "AO",
+    "datatype" : "bigint"
+  }, {
+    "id" : "42",
+    "name" : "AP",
+    "datatype" : "bigint"
+  }, {
+    "id" : "43",
+    "name" : "AQ",
+    "datatype" : "bigint"
+  }, {
+    "id" : "44",
+    "name" : "AR",
+    "datatype" : "bigint"
+  }, {
+    "id" : "45",
+    "name" : "AS",
+    "datatype" : "bigint"
+  }, {
+    "id" : "46",
+    "name" : "AT",
+    "datatype" : "bigint"
+  }, {
+    "id" : "47",
+    "name" : "AU",
+    "datatype" : "bigint"
+  }, {
+    "id" : "48",
+    "name" : "AV",
+    "datatype" : "bigint"
+  }, {
+    "id" : "49",
+    "name" : "AW",
+    "datatype" : "bigint"
+  }, {
+    "id" : "50",
+    "name" : "AX",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "51",
+    "name" : "AY",
+    "datatype" : "bigint"
+  }, {
+    "id" : "52",
+    "name" : "AZ",
+    "datatype" : "varchar(256)"
+  } ],
+  "source_type" : 0,
+  "database" : "DEFAULT"
+}
\ No newline at end of file
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index 9aa588c6b4..61624060eb 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -14,152 +14,146 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
+<!--Autogenerated by Cloudera Manager-->
 <configuration>
-
     <property>
         <name>fs.defaultFS</name>
-        <value>hdfs://sandbox.hortonworks.com:8020</value>
-        <final>true</final>
+        <value>hdfs://quickstart.cloudera:8020</value>
     </property>
-
     <property>
         <name>fs.trash.interval</name>
-        <value>360</value>
+        <value>1</value>
     </property>
-
     <property>
-        <name>ha.failover-controller.active-standby-elector.zk.op.retries</name>
-        <value>120</value>
+        <name>io.compression.codecs</name>
+        <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec,org.apache.hadoop.io.compress.Lz4Codec</value>
     </property>
-
     <property>
-        <name>hadoop.http.authentication.simple.anonymous.allowed</name>
-        <value>true</value>
+        <name>hadoop.security.authentication</name>
+        <value>simple</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.falcon.groups</name>
-        <value>users</value>
+        <name>hadoop.security.authorization</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>hadoop.rpc.protection</name>
+        <value>authentication</value>
+    </property>
+    <property>
+        <name>hadoop.security.auth_to_local</name>
+        <value>DEFAULT</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.falcon.hosts</name>
+        <name>hadoop.proxyuser.oozie.hosts</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hbase.groups</name>
-        <value>users</value>
+        <name>hadoop.proxyuser.oozie.groups</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hbase.hosts</name>
+        <name>hadoop.proxyuser.mapred.hosts</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hcat.groups</name>
+        <name>hadoop.proxyuser.mapred.groups</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hcat.hosts</name>
-        <value>sandbox.hortonworks.com</value>
+        <name>hadoop.proxyuser.flume.hosts</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hive.groups</name>
-        <value>users</value>
+        <name>hadoop.proxyuser.flume.groups</name>
+        <value>*</value>
+    </property>
+    <property>
+        <name>hadoop.proxyuser.HTTP.hosts</name>
+        <value>*</value>
+    </property>
+    <property>
+        <name>hadoop.proxyuser.HTTP.groups</name>
+        <value>*</value>
     </property>
-
     <property>
         <name>hadoop.proxyuser.hive.hosts</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.hue.groups</name>
+        <name>hadoop.proxyuser.hive.groups</name>
         <value>*</value>
     </property>
-
     <property>
         <name>hadoop.proxyuser.hue.hosts</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.oozie.groups</name>
+        <name>hadoop.proxyuser.hue.groups</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.oozie.hosts</name>
-        <value>sandbox.hortonworks.com</value>
+        <name>hadoop.proxyuser.httpfs.hosts</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.root.groups</name>
+        <name>hadoop.proxyuser.httpfs.groups</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.proxyuser.root.hosts</name>
+        <name>hadoop.proxyuser.hdfs.groups</name>
         <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.security.auth_to_local</name>
-        <value>DEFAULT</value>
+        <name>hadoop.proxyuser.hdfs.hosts</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.security.authentication</name>
-        <value>simple</value>
+        <name>hadoop.proxyuser.yarn.hosts</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>hadoop.security.authorization</name>
-        <value>false</value>
+        <name>hadoop.proxyuser.yarn.groups</name>
+        <value>*</value>
     </property>
-
     <property>
-        <name>io.compression.codecs</name>
-        <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
+        <name>hadoop.security.group.mapping</name>
+        <value>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</value>
     </property>
-
     <property>
-        <name>io.file.buffer.size</name>
-        <value>131072</value>
+        <name>hadoop.security.instrumentation.requires.admin</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>io.serializations</name>
-        <value>org.apache.hadoop.io.serializer.WritableSerialization</value>
+        <name>net.topology.script.file.name</name>
+        <value>/etc/hadoop/conf.cloudera.yarn/topology.py</value>
     </property>
-
     <property>
-        <name>ipc.client.connect.max.retries</name>
-        <value>50</value>
+        <name>io.file.buffer.size</name>
+        <value>65536</value>
     </property>
-
     <property>
-        <name>ipc.client.connection.maxidletime</name>
-        <value>30000</value>
+        <name>hadoop.ssl.enabled</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>hadoop.ssl.require.client.cert</name>
+        <value>false</value>
+        <final>true</final>
     </property>
-
     <property>
-        <name>ipc.client.idlethreshold</name>
-        <value>8000</value>
+        <name>hadoop.ssl.keystores.factory.class</name>
+        <value>org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory</value>
+        <final>true</final>
     </property>
-
     <property>
-        <name>ipc.server.tcpnodelay</name>
-        <value>true</value>
+        <name>hadoop.ssl.server.conf</name>
+        <value>ssl-server.xml</value>
+        <final>true</final>
     </property>
-
     <property>
-        <name>mapreduce.jobtracker.webinterface.trusted</name>
-        <value>false</value>
+        <name>hadoop.ssl.client.conf</name>
+        <value>ssl-client.xml</value>
+        <final>true</final>
     </property>
-
 </configuration>
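
Note: from here the sandbox configs are regenerated for the CDH 5.9 quickstart VM (note the "Autogenerated by Cloudera Manager" banner): fs.defaultFS moves from sandbox.hortonworks.com to quickstart.cloudera, authentication stays simple with authorization off, and the HDP proxyuser entries give way to the CDH set. A hedged sketch of how a test could confirm it is picking up the new file, assuming hadoop-common on the classpath (the class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class SandboxCoreSiteCheck {
        public static void main(String[] args) {
            Configuration conf = new Configuration(false); // skip built-in defaults
            conf.addResource(new Path("examples/test_case_data/sandbox/core-site.xml"));
            // With this PR applied, the default FS is the CDH quickstart host:
            System.out.println(conf.get("fs.defaultFS")); // hdfs://quickstart.cloudera:8020
        }
    }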
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 46d5345be5..58c62231a0 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -15,197 +15,104 @@
   limitations under the License.
 -->
 <configuration>
-
-    <property>
-        <name>dfs.domain.socket.path</name>
-        <value>/var/lib/hadoop-hdfs/dn_socket</value>
-    </property>
-
-    <property>
-        <name>hbase.client.keyvalue.maxsize</name>
-        <value>10485760</value>
-    </property>
-
     <property>
-        <name>hbase.client.scanner.caching</name>
-        <value>100</value>
+        <name>hbase.rootdir</name>
+        <value>hdfs://quickstart.cloudera:8020/hbase</value>
     </property>
-
     <property>
-        <name>hbase.cluster.distributed</name>
+        <name>hbase.replication</name>
         <value>true</value>
     </property>
-
-    <property>
-        <name>hbase.coprocessor.master.classes</name>
-        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
-    </property>
-
-    <property>
-        <name>hbase.coprocessor.region.classes</name>
-        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
-    </property>
-
     <property>
-        <name>hbase.defaults.for.version.skip</name>
-        <value>true</value>
+        <name>hbase.client.write.buffer</name>
+        <value>2097152</value>
     </property>
-
     <property>
-        <name>hbase.hregion.majorcompaction</name>
-        <value>604800000</value>
-    </property>
-
-    <property>
-        <name>hbase.hregion.majorcompaction.jitter</name>
-        <value>0.50</value>
+        <name>hbase.client.pause</name>
+        <value>100</value>
     </property>
-
     <property>
-        <name>hbase.hregion.max.filesize</name>
-        <value>10737418240</value>
+        <name>hbase.client.retries.number</name>
+        <value>35</value>
     </property>
-
     <property>
-        <name>hbase.hregion.memstore.block.multiplier</name>
-        <value>4</value>
+        <name>hbase.client.scanner.caching</name>
+        <value>100</value>
     </property>
-
     <property>
-        <name>hbase.hregion.memstore.flush.size</name>
-        <value>134217728</value>
+        <name>hbase.client.keyvalue.maxsize</name>
+        <value>10485760</value>
     </property>
-
     <property>
-        <name>hbase.hregion.memstore.mslab.enabled</name>
+        <name>hbase.ipc.client.allowsInterrupt</name>
         <value>true</value>
     </property>
-
     <property>
-        <name>hbase.hstore.blockingStoreFiles</name>
+        <name>hbase.client.primaryCallTimeout.get</name>
         <value>10</value>
     </property>
-
     <property>
-        <name>hbase.hstore.compactionThreshold</name>
-        <value>3</value>
+        <name>hbase.client.primaryCallTimeout.multiget</name>
+        <value>10</value>
     </property>
-
     <property>
-        <name>hbase.local.dir</name>
-        <value>${hbase.tmp.dir}/local</value>
+        <name>hbase.regionserver.thrift.http</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>hbase.master.info.bindAddress</name>
-        <value>0.0.0.0</value>
+        <name>hbase.thrift.support.proxyuser</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>hbase.master.info.port</name>
-        <value>60010</value>
-    </property>
-
-    <property>
-        <name>hbase.master.port</name>
+        <name>hbase.rpc.timeout</name>
         <value>60000</value>
     </property>
-
-    <property>
-        <name>hbase.regionserver.global.memstore.lowerLimit</name>
-        <value>0.38</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.global.memstore.upperLimit</name>
-        <value>0.4</value>
-    </property>
-
     <property>
-        <name>hbase.regionserver.handler.count</name>
-        <value>60</value>
+        <name>hbase.snapshot.enabled</name>
+        <value>true</value>
     </property>
-
     <property>
-        <name>hbase.regionserver.info.port</name>
-        <value>60030</value>
+        <name>hbase.snapshot.master.timeoutMillis</name>
+        <value>60000</value>
     </property>
-
     <property>
-        <name>hbase.rootdir</name>
-        <value>hdfs://sandbox.hortonworks.com:8020/apps/hbase/data</value>
+        <name>hbase.snapshot.region.timeout</name>
+        <value>60000</value>
     </property>
-
     <property>
-        <name>hbase.rpc.protection</name>
-        <value>PRIVACY</value>
+        <name>hbase.snapshot.master.timeout.millis</name>
+        <value>60000</value>
     </property>
-
     <property>
         <name>hbase.security.authentication</name>
         <value>simple</value>
     </property>
-
     <property>
-        <name>hbase.security.authorization</name>
-        <value>true</value>
+        <name>hbase.rpc.protection</name>
+        <value>authentication</value>
     </property>
-
     <property>
-        <name>hbase.superuser</name>
-        <value>hbase</value>
+        <name>zookeeper.session.timeout</name>
+        <value>60000</value>
     </property>
-
     <property>
-        <name>hbase.tmp.dir</name>
-        <value>/hadoop/hbase</value>
+        <name>zookeeper.znode.parent</name>
+        <value>/hbase</value>
     </property>
-
     <property>
-        <name>hbase.zookeeper.property.clientPort</name>
-        <value>2181</value>
+        <name>zookeeper.znode.rootserver</name>
+        <value>root-region-server</value>
     </property>
-
     <property>
         <name>hbase.zookeeper.quorum</name>
-        <value>sandbox.hortonworks.com</value>
-    </property>
-
-    <property>
-        <name>hbase.zookeeper.useMulti</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>hfile.block.cache.size</name>
-        <value>0.40</value>
+        <value>quickstart.cloudera</value>
     </property>
-
     <property>
-        <name>zookeeper.session.timeout</name>
-        <value>30000</value>
-    </property>
-
-    <property>
-        <name>zookeeper.znode.parent</name>
-        <value>/hbase-unsecure</value>
-    </property>
-    <property>
-        <name>hbase.client.pause</name>
-        <value>100</value>
-        <description>General client pause value.  Used mostly as value to wait
-            before running a retry of a failed get, region lookup, etc.
-            See hbase.client.retries.number for description of how we backoff from
-            this initial pause amount and how this pause works w/ retries.</description>
+        <name>hbase.zookeeper.property.clientPort</name>
+        <value>2181</value>
     </property>
     <property>
-        <name>hbase.client.retries.number</name>
-        <value>5</value>
-        <description>Maximum retries.  Used as maximum for all retryable
-            operations such as the getting of a cell's value, starting a row update,
-            etc.  Retry interval is a rough function based on hbase.client.pause.  At
-            first we retry at this interval but then with backoff, we pretty quickly reach
-            retrying every ten seconds.  See HConstants#RETRY_BACKOFF for how the backup
-            ramps up.  Change this setting and hbase.client.pause to suit your workload.</description>
+        <name>hbase.rest.ssl.enabled</name>
+        <value>false</value>
     </property>
 </configuration>
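
Note: the HBase settings follow suit: hbase.rootdir and the ZooKeeper quorum now point at quickstart.cloudera, zookeeper.znode.parent changes from HDP's /hbase-unsecure to the CDH default /hbase, the XaSecure coprocessors are dropped, and hbase.rpc.protection relaxes from PRIVACY to authentication. A wrong znode parent is the classic failure mode here (clients block waiting for the root region), so a quick read-back is worth having; illustrative class name, assumes hbase-client jars and this file on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class HBaseZnodeCheck {
        public static void main(String[] args) {
            // create() layers hbase-site.xml from the classpath over the defaults
            Configuration conf = HBaseConfiguration.create();
            System.out.println(conf.get("hbase.zookeeper.quorum")); // expect quickstart.cloudera
            System.out.println(conf.get("zookeeper.znode.parent")); // expect /hbase on CDH
        }
    }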
diff --git a/examples/test_case_data/sandbox/hdfs-site.xml b/examples/test_case_data/sandbox/hdfs-site.xml
index 1175fffc6c..05854bd613 100644
--- a/examples/test_case_data/sandbox/hdfs-site.xml
+++ b/examples/test_case_data/sandbox/hdfs-site.xml
@@ -15,271 +15,68 @@
   limitations under the License.
 -->
 <configuration>
-
-    <property>
-        <name>dfs.block.access.token.enable</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>dfs.block.size</name>
-        <value>34217472</value>
-    </property>
-
-    <property>
-        <name>dfs.blockreport.initialDelay</name>
-        <value>120</value>
-    </property>
-
-    <property>
-        <name>dfs.blocksize</name>
-        <value>134217728</value>
-    </property>
-
-    <property>
-        <name>dfs.client.read.shortcircuit</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>dfs.client.read.shortcircuit.streams.cache.size</name>
-        <value>4096</value>
-    </property>
-
-    <property>
-        <name>dfs.cluster.administrators</name>
-        <value>hdfs</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.address</name>
-        <value>0.0.0.0:50010</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.balance.bandwidthPerSec</name>
-        <value>6250000</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.data.dir</name>
-        <value>/hadoop/hdfs/data</value>
-        <final>true</final>
-    </property>
-
-    <property>
-        <name>dfs.datanode.data.dir.perm</name>
-        <value>750</value>
-    </property>
-
     <property>
-        <name>dfs.datanode.du.reserved</name>
-        <value>1073741824</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.failed.volumes.tolerated</name>
-        <value>0</value>
-        <final>true</final>
-    </property>
-
-    <property>
-        <name>dfs.datanode.http.address</name>
-        <value>0.0.0.0:50075</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.https.address</name>
-        <value>0.0.0.0:50475</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.ipc.address</name>
-        <value>0.0.0.0:8010</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.max.transfer.threads</name>
-        <value>1024</value>
-    </property>
-
-    <property>
-        <name>dfs.datanode.max.xcievers</name>
-        <value>1024</value>
-    </property>
-
-    <property>
-        <name>dfs.domain.socket.path</name>
-        <value>/var/lib/hadoop-hdfs/dn_socket</value>
-    </property>
-
-    <property>
-        <name>dfs.heartbeat.interval</name>
-        <value>3</value>
+        <name>dfs.namenode.name.dir</name>
+        <value>file:///var/lib/hadoop-hdfs/cache/hdfs/dfs/name</value>
     </property>
-
     <property>
-        <name>dfs.hosts.exclude</name>
-        <value>/etc/hadoop/conf/dfs.exclude</value>
+        <name>dfs.namenode.servicerpc-address</name>
+        <value>quickstart.cloudera:8022</value>
     </property>
-
     <property>
-        <name>dfs.http.policy</name>
-        <value>HTTP_ONLY</value>
+        <name>dfs.https.address</name>
+        <value>quickstart.cloudera:50470</value>
     </property>
-
     <property>
         <name>dfs.https.port</name>
         <value>50470</value>
     </property>
-
-    <property>
-        <name>dfs.journalnode.edits.dir</name>
-        <value>/hadoop/hdfs/journalnode</value>
-    </property>
-
-    <property>
-        <name>dfs.journalnode.http-address</name>
-        <value>0.0.0.0:8480</value>
-    </property>
-
-    <property>
-        <name>dfs.journalnode.https-address</name>
-        <value>0.0.0.0:8481</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.accesstime.precision</name>
-        <value>3600000</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.avoid.read.stale.datanode</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.avoid.write.stale.datanode</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.checkpoint.dir</name>
-        <value>/hadoop/hdfs/namesecondary</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.checkpoint.edits.dir</name>
-        <value>${dfs.namenode.checkpoint.dir}</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.checkpoint.period</name>
-        <value>21600</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.checkpoint.txns</name>
-        <value>1000000</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.handler.count</name>
-        <value>100</value>
-    </property>
-
     <property>
         <name>dfs.namenode.http-address</name>
-        <value>sandbox.hortonworks.com:50070</value>
-        <final>true</final>
-    </property>
-
-    <property>
-        <name>dfs.namenode.https-address</name>
-        <value>sandbox.hortonworks.com:50470</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.name.dir</name>
-        <value>/hadoop/hdfs/namenode</value>
-        <final>true</final>
-    </property>
-
-    <property>
-        <name>dfs.namenode.name.dir.restore</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.safemode.threshold-pct</name>
-        <value>1.0f</value>
+        <value>quickstart.cloudera:50070</value>
     </property>
-
     <property>
-        <name>dfs.namenode.secondary.http-address</name>
-        <value>sandbox.hortonworks.com:50090</value>
+        <name>dfs.replication</name>
+        <value>1</value>
     </property>
-
     <property>
-        <name>dfs.namenode.stale.datanode.interval</name>
-        <value>30000</value>
+        <name>dfs.blocksize</name>
+        <value>134217728</value>
     </property>
-
     <property>
-        <name>dfs.namenode.startup.delay.block.deletion.sec</name>
-        <value>3600</value>
+        <name>dfs.client.use.datanode.hostname</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.namenode.write.stale.datanode.ratio</name>
-        <value>1.0f</value>
+        <name>fs.permissions.umask-mode</name>
+        <value>022</value>
     </property>
-
     <property>
-        <name>dfs.nfs.exports.allowed.hosts</name>
-        <value>* rw</value>
+        <name>dfs.namenode.acls.enabled</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.nfs3.dump.dir</name>
-        <value>/tmp/.hdfs-nfs</value>
+        <name>dfs.client.use.legacy.blockreader</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.permissions.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>dfs.permissions.superusergroup</name>
-        <value>hdfs</value>
+        <name>dfs.client.read.shortcircuit</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.replication</name>
-        <value>1</value>
+        <name>dfs.domain.socket.path</name>
+        <value>/var/run/hdfs-sockets/dn</value>
     </property>
-
     <property>
-        <name>dfs.replication.max</name>
-        <value>50</value>
+        <name>dfs.client.read.shortcircuit.skip.checksum</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.support.append</name>
-        <value>true</value>
-        <final>true</final>
+        <name>dfs.client.domain.socket.data.traffic</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>dfs.webhdfs.enabled</name>
+        <name>dfs.datanode.hdfs-blocks-metadata.enabled</name>
         <value>true</value>
-        <final>true</final>
     </property>
-
-    <property>
-        <name>fs.permissions.umask-mode</name>
-        <value>022</value>
-    </property>
-
 </configuration>
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index e90f594e82..c9b1ca46a6 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -15,241 +15,165 @@
   limitations under the License.
 -->
 <configuration>
-
-    <property>
-        <name>io.sort.mb</name>
-        <value>128</value>
-    </property>
-
-    <property>
-        <name>mapred.child.java.opts</name>
-        <value>-Xmx200m</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.memory.mb</name>
-        <value>512</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.memory.mb</name>
-        <value>512</value>
-    </property>
-
-    <property>
-        <name>mapreduce.admin.map.child.java.opts</name>
-        <value>-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}</value>
-    </property>
-
-    <property>
-        <name>mapreduce.admin.reduce.child.java.opts</name>
-        <value>-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}</value>
-    </property>
-
-    <property>
-        <name>mapreduce.admin.user.env</name>
-        <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
-    </property>
-
-    <property>
-        <name>mapreduce.am.max-attempts</name>
-        <value>2</value>
-    </property>
-
-    <property>
-        <name>mapreduce.application.classpath</name>
-        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
-    </property>
-
-    <property>
-        <name>mapreduce.application.framework.path</name>
-        <value>/hdp/apps/${hdp.version}/mapreduce/mapreduce.tar.gz#mr-framework</value>
-    </property>
-
-    <property>
-        <name>mapreduce.cluster.administrators</name>
-        <value>hadoop</value>
-    </property>
-
-    <property>
-        <name>mapreduce.framework.name</name>
-        <value>yarn</value>
-    </property>
-
-    <property>
-        <name>mapreduce.job.emit-timeline-data</name>
-        <value>false</value>
-    </property>
-
-    <!--the default value on hdp is 0.05, however for test environments we need to be conservative on resource -->
-    <property>
-        <name>mapreduce.job.reduce.slowstart.completedmaps</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.address</name>
-        <value>sandbox.hortonworks.com:10020</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.bind-host</name>
-        <value>0.0.0.0</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.done-dir</name>
-        <value>/mr-history/done</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.intermediate-done-dir</name>
-        <value>/mr-history/tmp</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.webapp.address</name>
-        <value>sandbox.hortonworks.com:19888</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.java.opts</name>
-        <value>-Xmx512m</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.log.level</name>
-        <value>INFO</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.memory.mb</name>
-        <value>512</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.output.compress</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.sort.spill.percent</name>
-        <value>0.7</value>
-    </property>
-
-    <property>
-        <name>mapreduce.map.speculative</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>mapreduce.output.fileoutputformat.compress</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>mapreduce.output.fileoutputformat.compress.type</name>
-        <value>BLOCK</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.input.buffer.percent</name>
-        <value>0.0</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.java.opts</name>
-        <value>-Xmx200m</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.log.level</name>
-        <value>INFO</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.memory.mb</name>
-        <value>512</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.fetch.retry.enabled</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.fetch.retry.interval-ms</name>
-        <value>1000</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.fetch.retry.timeout-ms</name>
-        <value>30000</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.input.buffer.percent</name>
-        <value>0.7</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.merge.percent</name>
-        <value>0.66</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.shuffle.parallelcopies</name>
-        <value>30</value>
-    </property>
-
-    <property>
-        <name>mapreduce.reduce.speculative</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>mapreduce.shuffle.port</name>
-        <value>13562</value>
-    </property>
-
-    <property>
-        <name>mapreduce.task.io.sort.factor</name>
-        <value>100</value>
-    </property>
-
-    <property>
-        <name>mapreduce.task.io.sort.mb</name>
-        <value>128</value>
-    </property>
-
-    <property>
-        <name>mapreduce.task.timeout</name>
-        <value>300000</value>
-    </property>
-
-    <property>
-        <name>yarn.app.mapreduce.am.admin-command-opts</name>
-        <value>-Dhdp.version=${hdp.version}</value>
-    </property>
-
-    <property>
-        <name>yarn.app.mapreduce.am.command-opts</name>
-        <value>-Xmx512m</value>
-    </property>
-
-    <property>
-        <name>yarn.app.mapreduce.am.log.level</name>
-        <value>INFO</value>
-    </property>
-
-    <property>
-        <name>yarn.app.mapreduce.am.resource.mb</name>
-        <value>512</value>
-    </property>
-
-    <property>
-        <name>yarn.app.mapreduce.am.staging-dir</name>
-        <value>/user</value>
-    </property>
-
+<property>
+    <name>mapreduce.job.split.metainfo.maxsize</name>
+    <value>10000000</value>
+</property>
+<property>
+    <name>mapreduce.job.counters.max</name>
+    <value>120</value>
+</property>
+<property>
+    <name>mapreduce.output.fileoutputformat.compress</name>
+    <value>false</value>
+</property>
+<property>
+    <name>mapreduce.output.fileoutputformat.compress.type</name>
+    <value>BLOCK</value>
+</property>
+<property>
+    <name>mapreduce.output.fileoutputformat.compress.codec</name>
+    <value>org.apache.hadoop.io.compress.DefaultCodec</value>
+</property>
+<property>
+    <name>mapreduce.map.output.compress.codec</name>
+    <value>org.apache.hadoop.io.compress.SnappyCodec</value>
+</property>
+<property>
+    <name>mapreduce.map.output.compress</name>
+    <value>true</value>
+</property>
+<property>
+    <name>zlib.compress.level</name>
+    <value>DEFAULT_COMPRESSION</value>
+</property>
+<property>
+    <name>mapreduce.task.io.sort.factor</name>
+    <value>64</value>
+</property>
+<property>
+    <name>mapreduce.map.sort.spill.percent</name>
+    <value>0.8</value>
+</property>
+<property>
+    <name>mapreduce.reduce.shuffle.parallelcopies</name>
+    <value>10</value>
+</property>
+<property>
+    <name>mapreduce.task.timeout</name>
+    <value>600000</value>
+</property>
+<property>
+    <name>mapreduce.client.submit.file.replication</name>
+    <value>1</value>
+</property>
+<property>
+    <name>mapreduce.job.reduces</name>
+    <value>1</value>
+</property>
+<property>
+    <name>mapreduce.task.io.sort.mb</name>
+    <value>16</value>
+</property>
+<property>
+    <name>mapreduce.map.speculative</name>
+    <value>false</value>
+</property>
+<property>
+    <name>mapreduce.reduce.speculative</name>
+    <value>false</value>
+</property>
+<property>
+    <name>mapreduce.job.reduce.slowstart.completedmaps</name>
+    <value>0.8</value>
+</property>
+<property>
+    <name>mapreduce.jobhistory.address</name>
+    <value>quickstart.cloudera:10020</value>
+</property>
+<property>
+    <name>mapreduce.jobhistory.webapp.address</name>
+    <value>quickstart.cloudera:19888</value>
+</property>
+<property>
+    <name>mapreduce.jobhistory.webapp.https.address</name>
+    <value>quickstart.cloudera:19890</value>
+</property>
+<property>
+    <name>mapreduce.jobhistory.admin.address</name>
+    <value>quickstart.cloudera:10033</value>
+</property>
+<property>
+    <name>mapreduce.framework.name</name>
+    <value>yarn</value>
+</property>
+<property>
+    <name>yarn.app.mapreduce.am.staging-dir</name>
+    <value>/user</value>
+</property>
+<property>
+    <name>mapreduce.am.max-attempts</name>
+    <value>2</value>
+</property>
+<property>
+    <name>yarn.app.mapreduce.am.resource.mb</name>
+    <value>128</value>
+</property>
+<property>
+    <name>yarn.app.mapreduce.am.resource.cpu-vcores</name>
+    <value>1</value>
+</property>
+<property>
+    <name>mapreduce.job.ubertask.enable</name>
+    <value>false</value>
+</property>
+<property>
+    <name>yarn.app.mapreduce.am.command-opts</name>
+    <value>-Djava.net.preferIPv4Stack=true -Xmx52428800</value>
+</property>
+<property>
+    <name>mapreduce.map.java.opts</name>
+    <value>-Djava.net.preferIPv4Stack=true -Xmx52428800</value>
+</property>
+<property>
+    <name>mapreduce.reduce.java.opts</name>
+    <value>-Djava.net.preferIPv4Stack=true -Xmx52428800</value>
+</property>
+<property>
+    <name>yarn.app.mapreduce.am.admin.user.env</name>
+    <value>LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native:$JAVA_LIBRARY_PATH</value>
+</property>
+<property>
+    <name>mapreduce.map.memory.mb</name>
+    <value>128</value>
+</property>
+<property>
+    <name>mapreduce.map.cpu.vcores</name>
+    <value>1</value>
+</property>
+<property>
+    <name>mapreduce.reduce.memory.mb</name>
+    <value>128</value>
+</property>
+<property>
+    <name>mapreduce.reduce.cpu.vcores</name>
+    <value>1</value>
+</property>
+<property>
+    <name>mapreduce.job.heap.memory-mb.ratio</name>
+    <value>0.8</value>
+</property>
+<property>
+    <name>mapreduce.application.classpath</name>
+    <value>/tmp/kylin/*,/usr/lib/hadoop-mapreduce/lib/*,/etc/hadoop/conf:/usr/lib/hadoop/lib/*:/usr/lib/hadoop/.//*:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/*:/usr/lib/hadoop-hdfs/.//*:/usr/lib/hadoop-yarn/lib/*:/usr/lib/hadoop-yarn/.//*:/usr/lib/hadoop-mapreduce/lib/*:/usr/lib/hadoop-mapreduce/.//*,/usr/lib/hbase/hbase-common.jar,/etc/hive/conf</value>
+</property>
+<property>
+    <name>mapreduce.admin.user.env</name>
+    <value>LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native:$JAVA_LIBRARY_PATH</value>
+</property>
+<property>
+    <name>mapreduce.shuffle.max.connections</name>
+    <value>80</value>
+</property>
 </configuration>
+
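A note for anyone adapting this test configuration: the replacement block above is sized for the single-node Cloudera quickstart VM, with 128 MB containers and roughly 50 MB task heaps (the -Xmx52428800 values are byte counts). On a real CDH node the container and heap sizes would move together, keeping the heap near the 0.8 ratio set in mapreduce.job.heap.memory-mb.ratio. A minimal sketch with hypothetical values, not part of this PR:

    <!-- Hypothetical sizing for a non-sandbox node; the heap stays at
         roughly 0.8 of the container, matching the ratio above. -->
    <property>
        <name>mapreduce.map.memory.mb</name>
        <value>2048</value>
    </property>
    <property>
        <name>mapreduce.map.java.opts</name>
        <value>-Djava.net.preferIPv4Stack=true -Xmx1638m</value>
    </property>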
diff --git a/examples/test_case_data/sandbox/yarn-site.xml b/examples/test_case_data/sandbox/yarn-site.xml
index 8256158431..8988d4a18a 100644
--- a/examples/test_case_data/sandbox/yarn-site.xml
+++ b/examples/test_case_data/sandbox/yarn-site.xml
@@ -15,520 +15,128 @@
   limitations under the License.
 -->
 <configuration>
-
-    <property>
-        <name>hadoop.registry.rm.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>hadoop.registry.zk.quorum</name>
-        <value>sandbox.hortonworks.com:2181</value>
-    </property>
-
     <property>
         <name>yarn.acl.enable</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.admin.acl</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>yarn.application.classpath</name>
-        <value>$HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
-    </property>
-
-    <property>
-        <name>yarn.client.nodemanager-connect.max-wait-ms</name>
-        <value>60000</value>
-    </property>
-
-    <property>
-        <name>yarn.client.nodemanager-connect.retry-interval-ms</name>
-        <value>10000</value>
-    </property>
-
-    <property>
-        <name>yarn.http.policy</name>
-        <value>HTTP_ONLY</value>
-    </property>
-
-    <property>
-        <name>yarn.log-aggregation-enable</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.log-aggregation.retain-seconds</name>
-        <value>2592000</value>
-    </property>
-
-    <property>
-        <name>yarn.log.server.url</name>
-        <value>http://sandbox.hortonworks.com:19888/jobhistory/logs</value>
-    </property>
-
-    <property>
-        <name>yarn.node-labels.fs-store.retry-policy-spec</name>
-        <value>2000, 500</value>
-    </property>
-
-    <property>
-        <name>yarn.node-labels.fs-store.root-dir</name>
-        <value>/system/yarn/node-labels</value>
-    </property>
-
-    <property>
-        <name>yarn.node-labels.manager-class</name>
-        <value>org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.address</name>
-        <value>0.0.0.0:45454</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.admin-env</name>
-        <value>MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.aux-services</name>
-        <value>mapreduce_shuffle</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
-        <value>org.apache.hadoop.mapred.ShuffleHandler</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.bind-host</name>
-        <value>0.0.0.0</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.container-executor.class</name>
-        <value>org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.container-monitor.interval-ms</name>
-        <value>3000</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.delete.debug-delay-sec</name>
-        <value>0</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage</name>
-        <value>90</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb</name>
-        <value>1000</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.disk-health-checker.min-healthy-disks</name>
-        <value>0.25</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.health-checker.interval-ms</name>
-        <value>135000</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.health-checker.script.timeout-ms</name>
-        <value>60000</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.linux-container-executor.cgroups.hierarchy</name>
-        <value>hadoop-yarn</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.linux-container-executor.cgroups.mount</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.linux-container-executor.group</name>
-        <value>hadoop</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.linux-container-executor.resources-handler.class</name>
-        <value>org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.local-dirs</name>
-        <value>/hadoop/yarn/local</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log-aggregation.compression-type</name>
-        <value>gz</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log-aggregation.debug-enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log-aggregation.num-log-files-per-app</name>
-        <value>30</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds</name>
-        <value>-1</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log-dirs</name>
-        <value>/hadoop/yarn/log</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.log.retain-second</name>
-        <value>604800</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.pmem-check-enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.recovery.dir</name>
-        <value>/var/log/hadoop-yarn/nodemanager/recovery-state</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.recovery.enabled</name>
         <value>true</value>
     </property>
-
-    <property>
-        <name>yarn.nodemanager.remote-app-log-dir</name>
-        <value>/app-logs</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.remote-app-log-dir-suffix</name>
-        <value>logs</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.resource.cpu-vcores</name>
-        <value>8</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.resource.memory-mb</name>
-        <value>9216</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.resource.percentage-physical-cpu-limit</name>
-        <value>100</value>
-    </property>
-
-    <property>
-        <name>yarn.nodemanager.vmem-check-enabled</name>
-        <value>false</value>
-    </property>
-
     <property>
-        <name>yarn.nodemanager.vmem-pmem-ratio</name>
-        <value>10</value>
+        <name>yarn.admin.acl</name>
+        <value>*</value>
     </property>
-
     <property>
         <name>yarn.resourcemanager.address</name>
-        <value>sandbox.hortonworks.com:8050</value>
+        <value>quickstart.cloudera:8032</value>
     </property>
-
     <property>
         <name>yarn.resourcemanager.admin.address</name>
-        <value>sandbox.hortonworks.com:8141</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.am.max-attempts</name>
-        <value>2</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.bind-host</name>
-        <value>0.0.0.0</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.connect.max-wait.ms</name>
-        <value>900000</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.connect.retry-interval.ms</name>
-        <value>30000</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.fs.state-store.retry-policy-spec</name>
-        <value>2000, 500</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.fs.state-store.uri</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.ha.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.hostname</name>
-        <value>sandbox.hortonworks.com</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.nodes.exclude-path</name>
-        <value>/etc/hadoop/conf/yarn.exclude</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.recovery.enabled</name>
-        <value>true</value>
+        <value>quickstart.cloudera:8033</value>
     </property>
-
-    <property>
-        <name>yarn.resourcemanager.resource-tracker.address</name>
-        <value>sandbox.hortonworks.com:8025</value>
-    </property>
-
     <property>
         <name>yarn.resourcemanager.scheduler.address</name>
-        <value>sandbox.hortonworks.com:8030</value>
+        <value>quickstart.cloudera:8030</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.scheduler.class</name>
-        <value>org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.state-store.max-completed-applications</name>
-        <value>${yarn.resourcemanager.max-completed-applications}</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.store.class</name>
-        <value>org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size</name>
-        <value>10</value>
-    </property>
-
-    <property>
-        <name>yarn.resourcemanager.system-metrics-publisher.enabled</name>
-        <value>true</value>
+        <name>yarn.resourcemanager.resource-tracker.address</name>
+        <value>quickstart.cloudera:8031</value>
     </property>
-
     <property>
         <name>yarn.resourcemanager.webapp.address</name>
-        <value>sandbox.hortonworks.com:8088</value>
+        <value>quickstart.cloudera:8088</value>
     </property>
-
-    <property>
-        <name>yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled</name>
-        <value>false</value>
-    </property>
-
     <property>
         <name>yarn.resourcemanager.webapp.https.address</name>
-        <value>localhost:8090</value>
+        <value>quickstart.cloudera:8090</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.webapp.proxyuser.hcat.groups</name>
-        <value>*</value>
+        <name>yarn.resourcemanager.client.thread-count</name>
+        <value>50</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.webapp.proxyuser.hcat.hosts</name>
-        <value>*</value>
+        <name>yarn.resourcemanager.scheduler.client.thread-count</name>
+        <value>50</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.webapp.proxyuser.oozie.groups</name>
-        <value>*</value>
+        <name>yarn.resourcemanager.admin.client.thread-count</name>
+        <value>1</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.webapp.proxyuser.oozie.hosts</name>
-        <value>*</value>
+        <name>yarn.scheduler.minimum-allocation-mb</name>
+        <value>1</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.work-preserving-recovery.enabled</name>
-        <value>true</value>
+        <name>yarn.scheduler.increment-allocation-mb</name>
+        <value>512</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms</name>
-        <value>10000</value>
+        <name>yarn.scheduler.maximum-allocation-mb</name>
+        <value>2816</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.zk-acl</name>
-        <value>world:anyone:rwcda</value>
+        <name>yarn.scheduler.minimum-allocation-vcores</name>
+        <value>1</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.zk-address</name>
-        <value>localhost:2181</value>
+        <name>yarn.scheduler.increment-allocation-vcores</name>
+        <value>1</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.zk-num-retries</name>
-        <value>1000</value>
+        <name>yarn.scheduler.maximum-allocation-vcores</name>
+        <value>2</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.zk-retry-interval-ms</name>
+        <name>yarn.resourcemanager.amliveliness-monitor.interval-ms</name>
         <value>1000</value>
     </property>
-
     <property>
-        <name>yarn.resourcemanager.zk-state-store.parent-path</name>
-        <value>/rmstore</value>
+        <name>yarn.am.liveness-monitor.expiry-interval-ms</name>
+        <value>600000</value>
     </property>
-
-    <property>
-        <name>yarn.resourcemanager.zk-timeout-ms</name>
-        <value>10000</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.maximum-allocation-mb</name>
-        <value>9216</value>
-    </property>
-
     <property>
-        <name>yarn.scheduler.minimum-allocation-mb</name>
-        <value>1536</value>
+        <name>yarn.resourcemanager.am.max-attempts</name>
+        <value>2</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.address</name>
-        <value>sandbox.hortonworks.com:10200</value>
+        <name>yarn.resourcemanager.container.liveness-monitor.interval-ms</name>
+        <value>600000</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.bind-host</name>
-        <value>0.0.0.0</value>
+        <name>yarn.resourcemanager.nm.liveness-monitor.interval-ms</name>
+        <value>1000</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.client.max-retries</name>
-        <value>30</value>
+        <name>yarn.nm.liveness-monitor.expiry-interval-ms</name>
+        <value>600000</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.client.retry-interval-ms</name>
-        <value>1000</value>
+        <name>yarn.resourcemanager.resource-tracker.client.thread-count</name>
+        <value>50</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.enabled</name>
-        <value>true</value>
+        <name>yarn.application.classpath</name>
+        <value>$HADOOP_CLIENT_CONF_DIR,$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*,$HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/*</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.generic-application-history.store-class</name>
-        <value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
+        <name>yarn.resourcemanager.scheduler.class</name>
+        <value>org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.http-authentication.simple.anonymous.allowed</name>
+        <name>yarn.scheduler.fair.user-as-default-queue</name>
         <value>true</value>
     </property>
-
-    <property>
-        <name>yarn.timeline-service.http-authentication.type</name>
-        <value>simple</value>
-    </property>
-
     <property>
-        <name>yarn.timeline-service.leveldb-timeline-store.path</name>
-        <value>/hadoop/yarn/timeline</value>
+        <name>yarn.scheduler.fair.preemption</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.leveldb-timeline-store.read-cache-size</name>
-        <value>104857600</value>
+        <name>yarn.scheduler.fair.sizebasedweight</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size</name>
-        <value>10000</value>
+        <name>yarn.scheduler.fair.assignmultiple</name>
+        <value>false</value>
     </property>
-
     <property>
-        <name>yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size</name>
+        <name>yarn.resourcemanager.max-completed-applications</name>
         <value>10000</value>
     </property>
-
-    <property>
-        <name>yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms</name>
-        <value>300000</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.ttl-enable</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.ttl-ms</name>
-        <value>2678400000</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.webapp.address</name>
-        <value>sandbox.hortonworks.com:8188</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.webapp.https.address</name>
-        <value>sandbox.hortonworks.com:8190</value>
-    </property>
-
 </configuration>
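For context, this yarn-site swap does three things: it moves every endpoint from sandbox.hortonworks.com to quickstart.cloudera (the ResourceManager RPC port changes from HDP's 8050 back to the stock 8032), it switches the scheduler from CapacityScheduler to CDH's default FairScheduler, and it shrinks allocations to sandbox scale. For a client to submit jobs against this VM, only the framework name and the RM endpoint need to line up; a minimal client-side sketch, assuming quickstart.cloudera resolves from the client machine:

    <!-- in the client's mapred-site.xml -->
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <!-- in the client's yarn-site.xml -->
    <property>
        <name>yarn.resourcemanager.address</name>
        <value>quickstart.cloudera:8032</value>
    </property>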
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index 4c03768948..c0d83155bf 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <artifactId>kylin-jdbc</artifactId>
     <packaging>jar</packaging>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index 43e47c9743..70cfa9a7c3 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -18,12 +18,11 @@
 -->
 
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>kylin</artifactId>
         <groupId>org.apache.kylin</groupId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -31,8 +30,8 @@
     <name>Apache Kylin - Integration Test</name>
 
     <properties>
-        <hdp.version/>
-        <fastBuildMode/>
+        <hdp.version />
+        <fastBuildMode />
     </properties>
 
     <!-- Dependencies. -->
@@ -298,7 +297,7 @@
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-Dlog4j.configuration=kylin-log4j.properties</argument>
                                         <argument>-classpath</argument>
-                                        <classpath/>
+                                        <classpath />
                                         <argument>org.apache.kylin.provision.BuildCubeWithEngine</argument>
                                     </arguments>
                                     <workingDirectory>${project.basedir}</workingDirectory>
@@ -320,7 +319,7 @@
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
                                         <argument>-Dlog4j.configuration=kylin-log4j.properties</argument>
                                         <argument>-classpath</argument>
-                                        <classpath/>
+                                        <classpath />
                                         <argument>org.apache.kylin.provision.BuildCubeWithStream</argument>
                                     </arguments>
                                     <workingDirectory>${project.basedir}</workingDirectory>
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index f6c8801d28..6c20e8a53a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -35,8 +35,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -55,6 +54,10 @@
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.source.ISource;
+import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.SourcePartition;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
@@ -404,7 +407,10 @@ private String mergeSegment(String cubeName, long startDate, long endDate) throw
     }
 
     private String buildSegment(String cubeName, long startDate, long endDate) throws Exception {
-        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, endDate);
+        CubeInstance cubeInstance = cubeManager.getCube(cubeName);
+        ISource source = SourceFactory.tableSource(cubeInstance);
+        SourcePartition partition = source.parsePartitionBeforeBuild(cubeInstance, new SourcePartition(0, endDate, 0, 0, null, null));
+        CubeSegment segment = cubeManager.appendSegment(cubeInstance, partition.getStartDate(), partition.getEndDate());
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
@@ -425,10 +431,10 @@ private int cleanupOldStorage() throws Exception {
     }
 
     private void checkHFilesInHBase(CubeSegment segment) throws IOException {
-        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
-        String tableName = segment.getStorageLocationIdentifier();
-        try (HTable table = new HTable(conf, tableName)) {
-            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+        try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
+            String tableName = segment.getStorageLocationIdentifier();
+
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
             Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
             long totalSize = 0;
             for (Long size : sizeMap.values()) {
@@ -454,5 +460,4 @@ private void checkHFilesInHBase(CubeSegment segment) throws IOException {
             }
         }
     }
-
 }
\ No newline at end of file
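Two distinct changes meet in this file. Segment creation now routes through SourceFactory and SourcePartition, letting the table source adjust the requested build range before a segment is appended. And the HFile sanity check drops the deprecated new HTable(conf, name) constructor for the connection-based HBase 1.x API, which the bump to hbase 1.2.0-cdh5.9.0 elsewhere in this PR requires. The diff uses Kylin's HBaseConnection helper; what follows is a sketch of the same pattern with the plain HBase 1.x client, assuming hbase-site.xml is on the classpath and "my_segment_table" is a placeholder name:

    // Sketch only: connection-based region lookup with the vanilla HBase 1.x client.
    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.RegionLocator;

    public class RegionCountCheck {
        public static void main(String[] args) throws IOException {
            // table name is a placeholder; configuration comes from the classpath
            try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
                    RegionLocator locator = conn.getRegionLocator(TableName.valueOf("my_segment_table"))) {
                // one entry per region hosting the segment's table
                System.out.println("regions: " + locator.getAllRegionLocations().size());
            }
        }
    }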
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 2faa8d0721..9fd6c52cad 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.provision;
 
+import static java.lang.Thread.sleep;
+
 import java.io.File;
 import java.io.IOException;
 import java.text.ParseException;
@@ -32,7 +34,6 @@
 import java.util.concurrent.FutureTask;
 import java.util.concurrent.TimeUnit;
 
-import com.google.common.collect.Lists;
 import org.I0Itec.zkclient.ZkConnection;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.common.requests.MetadataResponse;
@@ -44,8 +45,6 @@
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.CubeUpdate;
 import org.apache.kylin.engine.EngineFactory;
-import org.apache.kylin.metadata.streaming.StreamingConfig;
-import org.apache.kylin.metadata.streaming.StreamingManager;
 import org.apache.kylin.job.DeployUtil;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -55,6 +54,11 @@
 import org.apache.kylin.job.manager.ExecutableManager;
 import org.apache.kylin.job.streaming.Kafka10DataLoader;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.streaming.StreamingConfig;
+import org.apache.kylin.metadata.streaming.StreamingManager;
+import org.apache.kylin.source.ISource;
+import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.SourcePartition;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
 import org.apache.kylin.source.kafka.config.BrokerConfig;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
@@ -64,7 +68,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.lang.Thread.sleep;
+import com.google.common.collect.Lists;
 
 /**
  *  for streaming cubing case "test_streaming_table"
@@ -214,13 +218,9 @@ public ExecutableState call() {
         Assert.assertTrue(segments.size() == succeedBuild);
 
         if (fastBuildMode == false) {
-            //empty build
-            ExecutableState result = buildSegment(cubeName, 0, Long.MAX_VALUE);
-            Assert.assertTrue(result == ExecutableState.DISCARDED);
-
             long endOffset = segments.get(segments.size() - 1).getSourceOffsetEnd();
             //merge
-            result = mergeSegment(cubeName, 0, endOffset);
+            ExecutableState result = mergeSegment(cubeName, 0, endOffset);
             Assert.assertTrue(result == ExecutableState.SUCCEED);
 
             segments = cubeManager.getCube(cubeName).getSegments();
@@ -253,7 +253,10 @@ private String refreshSegment(String cubeName, long startOffset, long endOffset)
     }
 
     protected ExecutableState buildSegment(String cubeName, long startOffset, long endOffset) throws Exception {
-        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, null, null);
+        CubeInstance cubeInstance = cubeManager.getCube(cubeName);
+        ISource source = SourceFactory.tableSource(cubeInstance);
+        SourcePartition partition = source.parsePartitionBeforeBuild(cubeInstance, new SourcePartition(0, 0, startOffset, endOffset, null, null));
+        CubeSegment segment = cubeManager.appendSegment(cubeManager.getCube(cubeName), partition);
         DefaultChainedExecutable job = EngineFactory.createBatchCubingJob(segment, "TEST");
         jobService.addJob(job);
         waitForJob(job.getId());
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 2ec5324e0b..61926d8fa4 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -102,7 +102,7 @@ public void testTimeoutQuery() throws Exception {
             toggles.put(BackdoorToggles.DEBUG_TOGGLE_COPROCESSOR_BEHAVIOR, StorageSideBehavior.SCAN_FILTER_AGGR_CHECKMEM_WITHDELAY.toString());//delay 10ms for every scan
             BackdoorToggles.setToggles(toggles);
 
-            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "0.01");//set timeout to 3s
+            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.coprocessor.timeout.seconds", "3");
 
             //these two cubes has RAW measure, will disturb limit push down
             RemoveBlackoutRealizationsRule.blackList.add("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
@@ -115,7 +115,7 @@ public void testTimeoutQuery() throws Exception {
             RemoveBlackoutRealizationsRule.blackList.remove("CUBE[name=test_kylin_cube_without_slr_left_join_empty]");
             RemoveBlackoutRealizationsRule.blackList.remove("CUBE[name=test_kylin_cube_without_slr_inner_join_empty]");
 
-            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.cube.visit.timeout.times", "1");//set timeout to 9s 
+            KylinConfig.getInstanceFromEnv().setProperty("kylin.query.coprocessor.timeout.seconds", "0"); // set timeout to default
             BackdoorToggles.cleanToggles();
         }
     }
diff --git a/pom.xml b/pom.xml
index c7538e4ddb..21518caa20 100644
--- a/pom.xml
+++ b/pom.xml
@@ -16,22 +16,21 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
         <groupId>org.apache</groupId>
         <artifactId>apache</artifactId>
         <version>16</version>
-        <relativePath/>
+        <relativePath />
         <!-- no parent resolution -->
     </parent>
 
     <groupId>org.apache.kylin</groupId>
     <artifactId>kylin</artifactId>
     <packaging>pom</packaging>
-    <version>1.6.0-SNAPSHOT</version>
+    <version>1.6.0</version>
 
     <name>Apache Kylin</name>
     <url>http://kylin.apache.org</url>
@@ -46,20 +45,20 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 
         <!-- Hadoop versions -->
-        <hadoop2.version>2.6.0</hadoop2.version>
-        <yarn.version>2.6.0</yarn.version>
+        <hadoop2.version>2.6.0-cdh5.9.0</hadoop2.version>
+        <yarn.version>2.6.0-cdh5.9.0</yarn.version>
 
         <!-- Hive versions -->
-        <hive.version>0.14.0</hive.version>
-        <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
+        <hive.version>1.1.0-cdh5.9.0</hive.version>
+        <hive-hcatalog.version>1.1.0-cdh5.9.0</hive-hcatalog.version>
 
         <!-- HBase versions -->
-        <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
+        <hbase-hadoop2.version>1.2.0-cdh5.9.0</hbase-hadoop2.version>
         <kafka.version>0.10.0.0</kafka.version>
 
         <!-- Hadoop deps, keep compatible with hadoop2.version -->
-        <zookeeper.version>3.4.6</zookeeper.version>
-        <curator.version>2.6.0</curator.version>
+        <zookeeper.version>3.4.5-cdh5.9.0</zookeeper.version>
+        <curator.version>2.7.1</curator.version>
         <jackson.version>2.2.4</jackson.version>
         <jsr305.version>3.0.1</jsr305.version>
         <guava.version>14.0</guava.version>
@@ -84,7 +83,7 @@
         <commons-math3.version>3.6.1</commons-math3.version>
 
         <!-- Spark -->
-        <spark.version>1.3.0</spark.version>
+        <spark.version>1.6.0-cdh5.9.0</spark.version>
 
         <!-- Utility -->
         <log4j.version>1.2.17</log4j.version>
@@ -180,7 +179,7 @@
         <connection>scm:git:https://git-wip-us.apache.org/repos/asf/kylin.git</connection>
         <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/kylin.git</developerConnection>
         <url>https://git-wip-us.apache.org/repos/asf/kylin.git</url>
-        <tag>HEAD</tag>
+        <tag>kylin-1.6.0</tag>
     </scm>
 
     <dependencyManagement>
@@ -814,6 +813,10 @@
             <id>conjars</id>
             <url>http://conjars.org/repo/</url>
         </repository>
+        <repository>
+            <id>cloudera</id>
+            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
+        </repository>
     </repositories>
 
     <build>
@@ -985,9 +988,28 @@
                                         </goals>
                                     </pluginExecutionFilter>
                                     <action>
-                                        <ignore/>
+                                        <ignore />
                                     </action>
                                 </pluginExecution>
+                                <pluginExecution>
+                                    <pluginExecutionFilter>
+                                        <groupId>org.apache.maven.plugins</groupId>
+                                        <artifactId>maven-dependency-plugin</artifactId>
+                                        <versionRange>[2.10,)</versionRange>
+                                        <goals>
+                                            <goal>copy</goal>
+                                        </goals>
+                                    </pluginExecutionFilter>
+                                    <action>
+                                        <ignore />
+                                    </action>
+                                </pluginExecution>
                             </pluginExecutions>
                         </lifecycleMappingMetadata>
                     </configuration>
@@ -1300,4 +1322,4 @@
             </build>
         </profile>
     </profiles>
-</project>
+</project>
\ No newline at end of file
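The root pom changes amount to retargeting the whole Hadoop stack at CDH 5.9: every stack artifact gets the -cdh5.9.0 build, Spark moves to Cloudera's 1.6.0 line, and the cloudera repository is added because those artifacts are not published to Maven Central. The suffixes should always move in lockstep, since Cloudera only tests its components against each other. A hypothetical sketch of retargeting another 5.x minor (version strings assumed, not taken from this PR):

    <hadoop2.version>2.6.0-cdh5.10.0</hadoop2.version>
    <hive.version>1.1.0-cdh5.10.0</hive.version>
    <hbase-hadoop2.version>1.2.0-cdh5.10.0</hbase-hadoop2.version>
    <zookeeper.version>3.4.5-cdh5.10.0</zookeeper.version>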
diff --git a/query/pom.xml b/query/pom.xml
index 6e063f6692..11ca72a119 100644
--- a/query/pom.xml
+++ b/query/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <!-- The basics. -->
     <artifactId>kylin-query</artifactId>
@@ -29,7 +28,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
index dafbb5f61c..c7b3c71621 100644
--- a/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
+++ b/query/src/main/java/org/apache/kylin/query/enumerator/OLAPEnumerator.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.query.enumerator;
 
+import java.util.Arrays;
 import java.util.Map;
 import java.util.Properties;
 
@@ -77,10 +78,9 @@ public boolean moveNext() {
     }
 
     private Object[] convertCurrentRow(ITuple tuple) {
-        // make sure the tuple layout is correct
-        //assert tuple.getAllFields().equals(olapContext.returnTupleInfo.getAllFields());
-
-        current = tuple.getAllValues();
+        // give calcite a new array every time, see details in KYLIN-2134
+        Object[] values = tuple.getAllValues();
+        current = Arrays.copyOf(values, values.length);
         return current;
     }
 
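The OLAPEnumerator change fixes an aliasing bug (KYLIN-2134): the tuple iterator reuses one backing array for every row, and Calcite may buffer row references (for sorts, joins, or windowing), so every buffered row silently turned into a copy of the last row read. A standalone sketch of the effect, not Kylin code:

    // Demonstrates the aliasing bug behind KYLIN-2134.
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class AliasingDemo {
        public static void main(String[] args) {
            Object[] shared = new Object[1];          // one reused backing array, like the ITuple's
            List<Object[]> buffered = new ArrayList<>();
            for (int i = 0; i < 3; i++) {
                shared[0] = i;
                buffered.add(shared);                 // buggy: all three rows are the same array
                // fix:   buffered.add(Arrays.copyOf(shared, shared.length));
            }
            System.out.println(buffered.get(0)[0]);   // prints 2, not the expected 0
        }
    }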
diff --git a/server-base/pom.xml b/server-base/pom.xml
index 67013e4266..61f95d0bd8 100644
--- a/server-base/pom.xml
+++ b/server-base/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-server-base</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index c70b506cfc..f78f4396e1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -27,7 +27,6 @@
 import java.util.Set;
 import java.util.UUID;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.cube.CubeInstance;
@@ -35,13 +34,13 @@
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.model.CubeBuildTypeEnum;
 import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
 import org.apache.kylin.dimension.DimensionEncodingFactory;
 import org.apache.kylin.engine.EngineFactory;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.JoinedFlatTable;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
 import org.apache.kylin.metadata.model.ISourceAware;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.rest.exception.BadRequestException;
@@ -74,6 +73,7 @@
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
 /**
@@ -144,10 +144,15 @@ public CubeInstance getCube(@PathVariable String cubeName) {
     @ResponseBody
     public GeneralResponse getSql(@PathVariable String cubeName, @PathVariable String segmentName) {
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
-        CubeSegment cubeSegment = cube.getSegment(segmentName, SegmentStatusEnum.READY);
-        IJoinedFlatTableDesc flatTableDesc = EngineFactory.getJoinedFlatTableDesc(cubeSegment);
-        String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc, false);
+        IJoinedFlatTableDesc flatTableDesc = null;
+        CubeSegment segment = cube.getSegment(segmentName, null);
+        if (segment != null) {
+            flatTableDesc = EngineFactory.getJoinedFlatTableDesc(segment);
+        } else {
+            flatTableDesc = new CubeJoinedFlatTableDesc(cube.getDescriptor());
+        }
 
+        String sql = JoinedFlatTable.generateSelectDataStatement(flatTableDesc, false);
         GeneralResponse repsonse = new GeneralResponse();
         repsonse.setProperty("sql", sql);
 
@@ -611,7 +616,6 @@ public CubeRequest updateCubeDesc(@RequestBody CubeRequest cubeRequest) throws J
 
     }
 
-
     /**
      * Initiate the very beginning of a streaming cube. Will seek the latest offsets of each partition from the streaming
      * source (kafka) and record them in the cube descriptor; in the first build job, it will use these offsets as the start point.
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
index d9050c1404..47ff3fec75 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/TableController.java
@@ -47,7 +47,8 @@
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.StreamingService;
-import org.apache.kylin.source.hive.HiveClient;
+import org.apache.kylin.source.hive.HiveClientFactory;
+import org.apache.kylin.source.hive.IHiveClient;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -189,7 +190,7 @@ private boolean unLoadHiveTable(String tableName, String project) {
         String[] dbTableName = HadoopUtil.parseHiveTableName(tableName);
         tableName = dbTableName[0] + "." + dbTableName[1];
         TableDesc desc = cubeMgmtService.getMetadataManager().getTableDesc(tableName);
-        if(desc == null)
+        if (desc == null)
             return false;
         tableType = desc.getSourceType();
 
@@ -315,7 +316,7 @@ public CardinalityRequest generateCardinality(@PathVariable String tableNames, @
     @RequestMapping(value = "/hive", method = { RequestMethod.GET })
     @ResponseBody
     private static List<String> showHiveDatabases() throws IOException {
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         List<String> results = null;
 
         try {
@@ -336,7 +337,7 @@ public CardinalityRequest generateCardinality(@PathVariable String tableNames, @
     @RequestMapping(value = "/hive/{database}", method = { RequestMethod.GET })
     @ResponseBody
     private static List<String> showHiveTables(@PathVariable String database) throws IOException {
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         List<String> results = null;
 
         try {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
index 91aff8b94c..48a8e58262 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/metrics/QueryMetricsFacade.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.rest.metrics;
 
+import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.request.SQLRequest;
@@ -38,28 +39,28 @@
 
     private static boolean enabled = false;
     private static ConcurrentHashMap<String, QueryMetrics> metricsMap = new ConcurrentHashMap<String, QueryMetrics>();
-    
+
     public static void init() {
         enabled = KylinConfig.getInstanceFromEnv().getQueryMetricsEnabled();
         if (!enabled)
             return;
-        
+
         DefaultMetricsSystem.initialize("Kylin");
     }
 
     public static void updateMetrics(SQLRequest sqlRequest, SQLResponse sqlResponse) {
         if (!enabled)
             return;
-        
+
         String projectName = sqlRequest.getProject();
         String cubeName = sqlResponse.getCube();
 
-        update(getQueryMetrics("Server_Total", metricsMap), sqlResponse);
+        update(getQueryMetrics("Server_Total"), sqlResponse);
 
-        update(getQueryMetrics(projectName, metricsMap), sqlResponse);
+        update(getQueryMetrics(projectName), sqlResponse);
 
         String cubeMetricName = projectName + ",sub=" + cubeName;
-        update(getQueryMetrics(cubeMetricName, metricsMap), sqlResponse);
+        update(getQueryMetrics(cubeMetricName), sqlResponse);
     }
 
     private static void update(QueryMetrics queryMetrics, SQLResponse sqlResponse) {
@@ -93,16 +94,29 @@ private static void incrCacheHitCount(QueryMetrics queryMetrics, SQLResponse sql
         }
     }
 
-    private static QueryMetrics getQueryMetrics(String name, ConcurrentHashMap<String, QueryMetrics> metricsMap) {
+    private static QueryMetrics getQueryMetrics(String name) {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         int[] intervals = config.getQueryMetricsPercentilesIntervals();
 
-        if (metricsMap.containsKey(name)) {
-            return metricsMap.get(name);
-        } else {
-            QueryMetrics queryMetrics = new QueryMetrics(intervals).registerWith(name);
-            metricsMap.put(name, queryMetrics);
+        QueryMetrics queryMetrics = metricsMap.get(name);
+        if (queryMetrics != null) {
             return queryMetrics;
         }
+
+        synchronized (QueryMetricsFacade.class) {
+            queryMetrics = metricsMap.get(name);
+            if (queryMetrics != null) {
+                return queryMetrics;
+            }
+
+            try {
+                queryMetrics = new QueryMetrics(intervals).registerWith(name);
+                metricsMap.put(name, queryMetrics);
+                return queryMetrics;
+            } catch (MetricsException e) {
+                logger.warn(name + " register error: ", e);
+            }
+        }
+        return queryMetrics;
     }
 }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 38f299e857..bfb5fe417a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 
 /**
  */
@@ -37,6 +37,6 @@
 
     String prepareHBaseTable(Class<?> clazz) throws IOException;
 
-    HTableInterface getTable(String tableName) throws IOException;
+    Table getTable(String tableName) throws IOException;
 
 }
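HTableInterface was deprecated in HBase 1.0 and later removed, which is why this interface and its mock below migrate to org.apache.hadoop.hbase.client.Table in the same commit. Under the 1.x API a Table is obtained from a shared Connection rather than constructed directly; a sketch of how an implementation might satisfy the new signature, assuming it holds an already-open Connection in a field named connection:

    // Sketch only: 'connection' is an assumed, already-open
    // org.apache.hadoop.hbase.client.Connection field.
    @Override
    public Table getTable(String tableName) throws IOException {
        // Table handles are lightweight; the caller closes them after use
        return connection.getTable(TableName.valueOf(tableName));
    }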
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
index 7983fc076d..8fad4f706c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AuthoritiesPopulator.java
@@ -21,6 +21,7 @@
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.rest.constant.Constant;
 import org.springframework.ldap.core.ContextSource;
 import org.springframework.security.core.GrantedAuthority;
@@ -32,7 +33,6 @@
  * 
  */
 public class AuthoritiesPopulator extends DefaultLdapAuthoritiesPopulator {
-
     String adminRole;
     SimpleGrantedAuthority adminRoleAsAuthority;
 
@@ -48,9 +48,28 @@
      */
     public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole, String defaultRole) {
         super(contextSource, groupSearchBase);
+        
+        this.adminRole = adminRole;
+        this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole);
+        if (defaultRole.contains(Constant.ROLE_MODELER))
+            this.defaultAuthorities.add(modelerAuthority);
+        if (defaultRole.contains(Constant.ROLE_ANALYST))
+            this.defaultAuthorities.add(analystAuthority);
+    }
+    
+    /**
+     * @param contextSource
+     * @param groupSearchBase
+     */
+    public AuthoritiesPopulator(ContextSource contextSource, String groupSearchBase, String adminRole, String defaultRole, String groupSearchFilter) {
+        super(contextSource, groupSearchBase);
+        
+        if (!StringUtils.isBlank(groupSearchFilter)){
+            setGroupSearchFilter(groupSearchFilter);
+        }
+        
         this.adminRole = adminRole;
         this.adminRoleAsAuthority = new SimpleGrantedAuthority(adminRole);
-
         if (defaultRole.contains(Constant.ROLE_MODELER))
             this.defaultAuthorities.add(modelerAuthority);
         if (defaultRole.contains(Constant.ROLE_ANALYST))
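
The new five-argument constructor lets an LDAP deployment override the group search filter while the four-argument form keeps its old behavior; a blank filter falls back to the superclass default. A hypothetical wiring sketch, with a search base, role names, and filter that are illustrative only:

    import org.apache.kylin.rest.security.AuthoritiesPopulator;
    import org.springframework.ldap.core.ContextSource;

    class LdapWiringSketch {
        AuthoritiesPopulator build(ContextSource contextSource) {
            return new AuthoritiesPopulator(
                    contextSource,
                    "ou=groups,dc=example,dc=com",  // groupSearchBase (illustrative)
                    "ROLE_ADMIN",                   // adminRole (illustrative)
                    "ROLE_MODELER,ROLE_ANALYST",    // defaultRole (illustrative)
                    "(member={0})");                // groupSearchFilter; blank keeps the default
        }
    }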
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index d9326f500b..cc76b877c8 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@
     private static final String aclTableName = "MOCK-ACL-TABLE";
     private static final String userTableName = "MOCK-USER-TABLE";
 
-    private HTableInterface mockedAclTable;
-    private HTableInterface mockedUserTable;
+    private Table mockedAclTable;
+    private Table mockedUserTable;
     private RealAclHBaseStorage realAcl;
 
     public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public String prepareHBaseTable(Class<?> clazz) throws IOException {
     }
 
     @Override
-    public HTableInterface getTable(String tableName) throws IOException {
+    public Table getTable(String tableName) throws IOException {
         if (realAcl != null) {
             return realAcl.getTable(tableName);
         }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index d0aa0ed633..972eea9fc1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@
  *     <li>remove some methods for loading data, checking values ...</li>
  * </ul>
  */
-public class MockHTable implements HTableInterface {
+public class MockHTable implements Table {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
@@ -114,14 +114,6 @@ public void addColumnFamily(String columnFamily) {
         this.columnFamilies.add(columnFamily);
     }
 
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public byte[] getTableName() {
-        return tableName.getBytes();
-    }
-
     @Override
     public TableName getName() {
         return null;
@@ -200,8 +192,8 @@ public boolean exists(Get get) throws IOException {
     }
 
     @Override
-    public Boolean[] exists(List<Get> gets) throws IOException {
-        return new Boolean[0];
+    public boolean[] existsAll(List<Get> list) throws IOException {
+        return new boolean[0];
     }
 
     /**
@@ -302,15 +294,6 @@ public Result get(Get get) throws IOException {
         return results.toArray(new Result[results.size()]);
     }
 
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
-        // FIXME: implement
-        return null;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -446,7 +429,7 @@ public void close() {
              */
         }
         if (filter.hasFilterRow() && !filteredOnRowKey) {
-            filter.filterRow(nkvs);
+            filter.filterRow();
         }
         if (filter.filterRow() || filteredOnRowKey) {
             nkvs.clear();
@@ -535,6 +518,11 @@ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] v
         return false;
     }
 
+    @Override
+    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
+        return false;
+    }
+
     /**
      * {@inheritDoc}
      */
@@ -555,7 +543,7 @@ public void delete(Delete delete) throws IOException {
                 continue;
             }
             for (KeyValue kv : delete.getFamilyMap().get(family)) {
-                if (kv.isDeleteFamily()) {
+                if (kv.isDelete()) {
                     data.get(row).get(kv.getFamily()).clear();
                 } else {
                     data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -592,6 +580,11 @@ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[
         return false;
     }
 
+    @Override
+    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
+        return false;
+    }
+
     /**
      * {@inheritDoc}
      */
@@ -605,7 +598,7 @@ public Result increment(Increment increment) throws IOException {
      */
     @Override
     public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
-        return incrementColumnValue(row, family, qualifier, amount, true);
+        return incrementColumnValue(row, family, qualifier, amount, null);
     }
 
     @Override
@@ -613,37 +606,6 @@ public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, lo
         return 0;
     }
 
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
-        if (check(row, family, qualifier, null)) {
-            Put put = new Put(row);
-            put.add(family, qualifier, Bytes.toBytes(amount));
-            put(put);
-            return amount;
-        }
-        long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
-        data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
-        return newValue;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public boolean isAutoFlush() {
-        return true;
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void flushCommits() throws IOException {
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -669,29 +631,6 @@ public CoprocessorRpcChannel coprocessorService(byte[] row) {
 
     }
 
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void setAutoFlush(boolean autoFlush) {
-        throw new NotImplementedException();
-
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
-        throw new NotImplementedException();
-
-    }
-
-    @Override
-    public void setAutoFlushTo(boolean autoFlush) {
-        throw new NotImplementedException();
-    }
-
     /**
      * {@inheritDoc}
      */
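
Most of the MockHTable churn is mechanical: HBase 1.x's Table interface drops getTableName(), getRowOrBefore(), the autoflush controls, and the Boolean[] exists(List) variant (now boolean[] existsAll), and adds CompareOp overloads for conditional mutations. For orientation, the conditional-put overload the mock now stubs looks like this at a call site; the row, family, and cell values are made up:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.filter.CompareFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    class CheckAndPutSketch {
        boolean updateIfExpected(Table table) throws IOException {
            Put put = new Put(Bytes.toBytes("row1"))
                    .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("new"));
            // apply the Put only if f:q currently equals "expected"
            return table.checkAndPut(Bytes.toBytes("row1"),
                    Bytes.toBytes("f"), Bytes.toBytes("q"),
                    CompareFilter.CompareOp.EQUAL, Bytes.toBytes("expected"),
                    put);
        }
    }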
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index ab18029048..d55edc35ce 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,7 +21,8 @@
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -57,11 +58,11 @@ public String prepareHBaseTable(Class<?> clazz) throws IOException {
     }
 
     @Override
-    public HTableInterface getTable(String tableName) throws IOException {
+    public Table getTable(String tableName) throws IOException {
         if (StringUtils.equals(tableName, aclTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
+            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
         } else if (StringUtils.equals(tableName, userTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(userTableName);
+            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
         } else {
             throw new IllegalStateException("getTable failed" + tableName);
         }
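
The same one-line idiom recurs through the rest of the server module: the 0.98-era HConnection.getTable(String) becomes Connection.getTable(TableName.valueOf(name)) on the HBase 1.x client that CDH 5.9 ships. In isolation, with connection setup shown only for completeness (a real caller should share the heavyweight Connection, as HBaseConnection.get() does):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    class TableLookupSketch {
        Table open(String tableName) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            Connection conn = ConnectionFactory.createConnection(conf); // heavyweight; cache and share it
            return conn.getTable(TableName.valueOf(tableName));         // lightweight; close after each use
        }
    }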
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index d693a6738e..3e3efeccea 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public void init() throws IOException {
     @Override
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
         List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -173,7 +173,7 @@ public Acl readAclById(ObjectIdentity object, List<Sid> sids) throws NotFoundExc
     @Override
     public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
         Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
-        HTableInterface htable = null;
+        Table htable = null;
         Result result = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,17 +226,16 @@ public MutableAcl createAcl(ObjectIdentity objectIdentity) throws AlreadyExistsE
         Authentication auth = SecurityContextHolder.getContext().getAuthentication();
         PrincipalSid sid = new PrincipalSid(auth);
 
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
-            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
-            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
 
             htable.put(put);
-            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " created successfully.");
         } catch (IOException e) {
@@ -250,7 +249,7 @@ public MutableAcl createAcl(ObjectIdentity objectIdentity) throws AlreadyExistsE
 
     @Override
     public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -266,7 +265,6 @@ public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) thr
             }
 
             htable.delete(delete);
-            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " deleted successfully.");
         } catch (IOException e) {
@@ -284,7 +282,7 @@ public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
             throw e;
         }
 
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -295,17 +293,16 @@ public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
             Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
 
             if (null != acl.getParentAcl()) {
-                put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
             }
 
             for (AccessControlEntry ace : acl.getEntries()) {
                 AceInfo aceInfo = new AceInfo(ace);
-                put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
             }
 
             if (!put.isEmpty()) {
                 htable.put(put);
-                htable.flushCommits();
 
                 logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
             }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 99e54b9e1b..6a2e9f52d0 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -28,9 +28,7 @@
 import java.util.Set;
 import java.util.WeakHashMap;
 
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -410,33 +408,24 @@ public HBaseResponse getHTableInfo(String tableName) throws IOException {
         if (htableInfoCache.containsKey(tableName)) {
             return htableInfoCache.get(tableName);
         }
-
-        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
-        HTable table = null;
+        Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
         HBaseResponse hr = null;
         long tableSize = 0;
         int regionCount = 0;
 
-        try {
-            table = new HTable(hconf, tableName);
-
-            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
-            Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
 
-            for (long s : sizeMap.values()) {
-                tableSize += s;
-            }
-
-            regionCount = sizeMap.size();
-
-            // Set response.
-            hr = new HBaseResponse();
-            hr.setTableSize(tableSize);
-            hr.setRegionCount(regionCount);
-        } finally {
-            IOUtils.closeQuietly(table);
+        for (long s : sizeMap.values()) {
+            tableSize += s;
         }
 
+        regionCount = sizeMap.size();
+
+        // Set response.
+        hr = new HBaseResponse();
+        hr.setTableSize(tableSize);
+        hr.setRegionCount(regionCount);
         htableInfoCache.put(tableName, hr);
 
         return hr;
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
index bc4d89ccd9..49b9b9f4a1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -51,6 +51,9 @@
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.BadRequestException;
+import org.apache.kylin.source.ISource;
+import org.apache.kylin.source.SourceFactory;
+import org.apache.kylin.source.SourcePartition;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -206,11 +209,12 @@ public JobInstance submitJob(CubeInstance cube, long startDate, long endDate, lo
         }
 
         checkCubeDescSignature(cube);
-
         DefaultChainedExecutable job;
-
         if (buildType == CubeBuildTypeEnum.BUILD) {
-            CubeSegment newSeg = getCubeManager().appendSegment(cube, startDate, endDate, startOffset, endOffset, sourcePartitionOffsetStart, sourcePartitionOffsetEnd);
+            ISource source = SourceFactory.tableSource(cube);
+            SourcePartition sourcePartition = new SourcePartition(startDate, endDate, startOffset, endOffset, sourcePartitionOffsetStart, sourcePartitionOffsetEnd);
+            sourcePartition = source.parsePartitionBeforeBuild(cube, sourcePartition);
+            CubeSegment newSeg = getCubeManager().appendSegment(cube, sourcePartition);
             job = EngineFactory.createBatchCubingJob(newSeg, submitter);
         } else if (buildType == CubeBuildTypeEnum.MERGE) {
             CubeSegment newSeg = getCubeManager().mergeSegments(cube, startDate, endDate, startOffset, endOffset, force);
@@ -364,15 +368,11 @@ public void resumeJob(JobInstance job) throws IOException, JobException {
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#job, 'ADMINISTRATION') or hasPermission(#job, 'OPERATION') or hasPermission(#job, 'MANAGEMENT')")
     public JobInstance cancelJob(JobInstance job) throws IOException, JobException {
-        //        CubeInstance cube = this.getCubeManager().getCube(job.getRelatedCube());
-        //        for (BuildCubeJob cubeJob: listAllCubingJobs(cube.getName(), null, EnumSet.of(ExecutableState.READY, ExecutableState.RUNNING))) {
-        //            getExecutableManager().stopJob(cubeJob.getId());
-        //        }
         CubeInstance cubeInstance = getCubeManager().getCube(job.getRelatedCube());
         final String segmentIds = job.getRelatedSegment();
         for (String segmentId : StringUtils.split(segmentIds)) {
             final CubeSegment segment = cubeInstance.getSegmentById(segmentId);
-            if (segment != null && segment.getStatus() == SegmentStatusEnum.NEW) {
+            if (segment != null && (segment.getStatus() == SegmentStatusEnum.NEW || segment.getDateRangeEnd() == 0)) {
                 // Remove this segments
                 CubeUpdate cubeBuilder = new CubeUpdate(cubeInstance);
                 cubeBuilder.setToRemoveSegs(segment);
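
The submitJob change is behavioral, not cosmetic: the user-requested range is now handed to the table source first, which may rewrite it (the Kafka source resolves the request into concrete partition offsets) before a segment is appended. A stub sketch of the hand-off; the stand-in types below only mirror the shape of Kylin's ISource and SourcePartition:

    class PartitionHandOffSketch {
        static class SourcePartition {
            final long startDate, endDate;
            SourcePartition(long startDate, long endDate) {
                this.startDate = startDate;
                this.endDate = endDate;
            }
        }

        interface ISource {
            // a streaming source may clamp or replace the requested range here
            SourcePartition parsePartitionBeforeBuild(SourcePartition requested);
        }

        static SourcePartition resolve(ISource source, long startDate, long endDate) {
            SourcePartition requested = new SourcePartition(startDate, endDate);
            return source.parsePartitionBeforeBuild(requested); // normalized range feeds appendSegment()
        }
    }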
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 94f2dd5db5..8765a11fd1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -39,6 +39,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.UUID;
 
 import javax.annotation.PostConstruct;
 import javax.sql.DataSource;
@@ -46,11 +47,11 @@
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.Bytes;
@@ -72,7 +73,6 @@
 import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.request.SQLRequest;
 import org.apache.kylin.rest.response.SQLResponse;
-import org.apache.kylin.rest.util.QueryIdGenerator;
 import org.apache.kylin.rest.util.QueryUtil;
 import org.apache.kylin.rest.util.Serializer;
 import org.apache.kylin.rest.util.TableauInterceptor;
@@ -117,8 +117,6 @@
     private final String hbaseUrl;
     private final String userTableName;
 
-    private QueryIdGenerator queryIdGenerator = new QueryIdGenerator();
-
     @Autowired
     private CacheManager cacheManager;
 
@@ -163,14 +161,13 @@ public void saveQuery(final String creator, final Query query) throws IOExceptio
         Query[] queryArray = new Query[queries.size()];
 
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        HTableInterface htable = null;
+        Table htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
+            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
             Put put = new Put(Bytes.toBytes(creator));
-            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
-            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -196,14 +193,13 @@ public void removeQuery(final String creator, final String id) throws IOExceptio
 
         Query[] queryArray = new Query[queries.size()];
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        HTableInterface htable = null;
+        Table htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
+            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
             Put put = new Put(Bytes.toBytes(creator));
-            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
-            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -215,12 +211,12 @@ public void removeQuery(final String creator, final String id) throws IOExceptio
         }
 
         List<Query> queries = new ArrayList<Query>();
-        HTableInterface htable = null;
+        Table htable = null;
         try {
-            HConnection conn = HBaseConnection.get(hbaseUrl);
+            org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
             HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
 
-            htable = conn.getTable(userTableName);
+            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
             Get get = new Get(Bytes.toBytes(creator));
             get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
             Result result = htable.get(get);
@@ -325,7 +321,7 @@ public SQLResponse doQueryWithCache(SQLRequest sqlRequest) {
             throw new InternalErrorException("Query is not allowed in " + serverMode + " mode.");
         }
 
-        final String queryId = queryIdGenerator.nextId(sqlRequest.getProject());
+        final String queryId = UUID.randomUUID().toString();
 
         Map<String, String> toggles = new HashMap<>();
         toggles.put(BackdoorToggles.KEY_QUERY_ID, queryId);
@@ -334,7 +330,7 @@ public SQLResponse doQueryWithCache(SQLRequest sqlRequest) {
         }
         BackdoorToggles.setToggles(toggles);
 
-        try (SetThreadName ignored = new SetThreadName("Query-%s", queryId)) {
+        try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             String sql = sqlRequest.getSql();
             String project = sqlRequest.getProject();
             logger.info("Using project: " + project);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index 07c7c6ff7c..ab54882609 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public void init() throws IOException {
 
     @Override
     public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -144,16 +144,16 @@ public void createUser(UserDetails user) {
 
     @Override
     public void updateUser(UserDetails user) {
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Pair<byte[], byte[]> pair = userToHBaseRow(user);
             Put put = new Put(pair.getKey());
-            put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+
+            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
 
             htable.put(put);
-            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -163,14 +163,13 @@ public void updateUser(UserDetails user) {
 
     @Override
     public void deleteUser(String username) {
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Delete delete = new Delete(Bytes.toBytes(username));
 
             htable.delete(delete);
-            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -185,7 +184,7 @@ public void changePassword(String oldPassword, String newPassword) {
 
     @Override
     public boolean userExists(String username) {
-        HTableInterface htable = null;
+        Table htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -216,7 +215,7 @@ public boolean userExists(String username) {
         s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
 
         List<UserDetails> all = new ArrayList<UserDetails>();
-        HTableInterface htable = null;
+        Table htable = null;
         ResultScanner scanner = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/KeywordDefaultDirtyHack.java b/server-base/src/main/java/org/apache/kylin/rest/util/KeywordDefaultDirtyHack.java
index 8d8d971842..d8c8845cac 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/util/KeywordDefaultDirtyHack.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/KeywordDefaultDirtyHack.java
@@ -27,6 +27,7 @@ public String transform(String sql) {
         // KYLIN-2108, DEFAULT is hive default database, but a sql keyword too, needs quote
         sql = sql.replace("DEFAULT.", "\"DEFAULT\".");
         sql = sql.replace("default.", "\"default\".");
+        sql = sql.replace("defaultCatalog.", "");
 
         return sql;
     }
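
A worked example of the transform above, including the newly stripped defaultCatalog prefix; the input SQL is illustrative:

    class DirtyHackExample {
        public static void main(String[] args) {
            String sql = "select * from defaultCatalog.DEFAULT.KYLIN_SALES";
            sql = sql.replace("DEFAULT.", "\"DEFAULT\".");
            sql = sql.replace("default.", "\"default\".");
            sql = sql.replace("defaultCatalog.", "");
            System.out.println(sql); // select * from "DEFAULT".KYLIN_SALES
        }
    }

The replacement order is safe here because defaultCatalog. contains neither DEFAULT. nor default., so it survives the first two passes intact and is stripped whole by the third.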
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/QueryIdGenerator.java b/server-base/src/main/java/org/apache/kylin/rest/util/QueryIdGenerator.java
deleted file mode 100644
index 2dd19c20a5..0000000000
--- a/server-base/src/main/java/org/apache/kylin/rest/util/QueryIdGenerator.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.util;
-
-import org.apache.commons.lang3.time.FastDateFormat;
-
-import javax.annotation.Nonnull;
-import javax.annotation.concurrent.ThreadSafe;
-import java.util.concurrent.ThreadLocalRandom;
-
-@ThreadSafe
-public class QueryIdGenerator {
-    private static final char[] base26 = "abcdefghijklmnopqrstuvwxyz".toCharArray();
-    private static final FastDateFormat dateFormat = FastDateFormat.getInstance("yyyyMMdd_HHmmss");
-
-    /**
-     * @param project name of the project
-     * @return the next query id. We try to generate unique id as much as possible, but don't guarantee it.
-     */
-    @Nonnull
-    public String nextId(final String project) {
-        char[] postfix = new char[6];
-        for (int i = 0; i < postfix.length; i++) {
-            postfix[i] = base26[ThreadLocalRandom.current().nextInt(base26.length)];
-        }
-
-        return new String(postfix);
-        //return String.format("%s_%s_%s", dateFormat.format(System.currentTimeMillis()), project, new String(postfix));
-        //disabled testcase: org.apache.kylin.rest.util.QueryIdGeneratorTest.testIdFormat()
-    }
-}
diff --git a/server-base/src/test/java/org/apache/kylin/rest/util/QueryIdGeneratorTest.java b/server-base/src/test/java/org/apache/kylin/rest/util/QueryIdGeneratorTest.java
deleted file mode 100644
index dc82c17274..0000000000
--- a/server-base/src/test/java/org/apache/kylin/rest/util/QueryIdGeneratorTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.util;
-
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-public class QueryIdGeneratorTest {
-
-    @Ignore
-    @Test
-    public void testIdFormat() {
-        QueryIdGenerator generator = new QueryIdGenerator();
-        for (int i = 0; i < 100; i++) {
-            String queryId = generator.nextId("project");
-            Assert.assertTrue(queryId.contains("project"));
-        }
-    }
-
-    @Test
-    public void testIdUniqueness() {
-        QueryIdGenerator generator = new QueryIdGenerator();
-        Set<String> idSet = new HashSet<>();
-
-        for (int i = 0; i < 1000; i++) {
-            idSet.add(generator.nextId("test"));
-        }
-
-        Assert.assertEquals(1000, idSet.size());
-    }
-
-    @Test
-    public void testSingleThreadThroughput() {
-        int N = 1_000_000;
-        long millis = new GenIdTask(new QueryIdGenerator(), N).call();
-
-        // ops / second
-        double throughput = (N * 1000.0) / millis;
-        System.out.format("QueryIdGenerator single thread throughput: %d ops/second\n", (int) throughput);
-    }
-
-    @Test
-    public void testMultiThreadsThroughput() throws ExecutionException, InterruptedException {
-        QueryIdGenerator generator = new QueryIdGenerator();
-        int N = 1_000_000;
-
-        final int numThreads = 4;
-        ExecutorService pool = Executors.newFixedThreadPool(numThreads);
-        Future[] futures = new Future[numThreads];
-
-        for (int i = 0; i < numThreads; i++) {
-            futures[i] = pool.submit(new GenIdTask(generator, N));
-        }
-
-        long sumMillis = 0;
-        for (int i = 0; i < numThreads; i++) {
-            sumMillis += (long) futures[i].get();
-        }
-        pool.shutdown();
-
-        double avgThroughputPerThread = (N * 1000.0) / (sumMillis / (double) numThreads);
-        System.out.format("QueryIdGenerator multi threads throughput: %d ops/second\n", (int) avgThroughputPerThread);
-    }
-
-    private static class GenIdTask implements Callable<Long> {
-        private final QueryIdGenerator generator;
-        private final int N;
-
-        GenIdTask(QueryIdGenerator generator, int N) {
-            this.generator = generator;
-            this.N = N;
-        }
-
-        @Override
-        public Long call() {
-            long start = System.currentTimeMillis();
-            for (int i = 0; i < N; i++) {
-                generator.nextId("test");
-            }
-            return System.currentTimeMillis() - start;
-        }
-    }
-}
\ No newline at end of file
diff --git a/server/pom.xml b/server/pom.xml
index 43453a84fd..df3256a773 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-server</artifactId>
@@ -29,7 +28,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
     </parent>
 
     <dependencies>
@@ -112,6 +111,10 @@
                     <groupId>javax.servlet</groupId>
                     <artifactId>servlet-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
                 <exclusion>
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
@@ -131,6 +134,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -146,6 +153,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -161,6 +172,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -176,6 +191,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -199,6 +218,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -214,6 +237,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -229,6 +256,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
 
@@ -262,6 +293,10 @@
                     <groupId>javax.servlet.jsp</groupId>
                     <artifactId>jsp-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
diff --git a/server/src/main/resources/kylinSecurity.xml b/server/src/main/resources/kylinSecurity.xml
index 40481cebb4..332b026842 100644
--- a/server/src/main/resources/kylinSecurity.xml
+++ b/server/src/main/resources/kylinSecurity.xml
@@ -89,6 +89,7 @@
 							<constructor-arg index="1" value="${ldap.user.groupSearchBase}" />
 							<constructor-arg index="2" value="${acl.adminRole}" />
 							<constructor-arg index="3" value="${acl.defaultRole}" />
+							<constructor-arg index="4" value="${ldap.user.groupSearchFilter}" />
 						</bean>
 					</constructor-arg>
 				</bean>
@@ -116,6 +117,7 @@
 							<constructor-arg index="1" value="${ldap.service.groupSearchBase}" />
 							<constructor-arg index="2" value="${acl.adminRole}" />
 							<constructor-arg index="3" value="${acl.defaultRole}" />
+							<constructor-arg index="4" value="${ldap.service.groupSearchFilter}" />
 						</bean>
 					</constructor-arg>
 				</bean>
diff --git a/source-hive/pom.xml b/source-hive/pom.xml
index 08019d0d79..79ced8c09d 100644
--- a/source-hive/pom.xml
+++ b/source-hive/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-source-hive</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
@@ -64,6 +63,11 @@
             <artifactId>hive-hcatalog-core</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-jdbc</artifactId>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.mrunit</groupId>
             <artifactId>mrunit</artifactId>
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
new file mode 100644
index 0000000000..dc36c4f6fc
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineHiveClient.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.kylin.common.util.DBUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+public class BeelineHiveClient implements IHiveClient {
+    private static final Logger logger = LoggerFactory.getLogger(BeelineHiveClient.class);
+
+    private Connection cnct;
+    private Statement stmt;
+    private DatabaseMetaData metaData;
+
+    public BeelineHiveClient(String beelineParams) {
+        if (StringUtils.isEmpty(beelineParams)) {
+            throw new IllegalArgumentException("beelineParams cannot be empty");
+        }
+        String[] splits = StringUtils.split(beelineParams);
+        logger.debug("beelineParams:{}", beelineParams);
+        String url = null, username = null, password = null, pwdFilePath = null;
+        for (int i = 0; i < splits.length; i++) {
+            if ("-u".equals(splits[i])) {
+                url = stripQuotes(splits[i + 1]);
+            }
+            if ("-n".equals(splits[i])) {
+                username = stripQuotes(splits[i + 1]);
+            }
+            if ("-p".equals(splits[i])) {
+                password = stripQuotes(splits[i + 1]);
+            }
+            if ("-w".equals(splits[i])) {
+                pwdFilePath = stripQuotes(splits[i + 1]);
+                try {
+                    password = StringUtils.trim(FileUtils.readFileToString(new File(pwdFilePath), Charset.defaultCharset()));
+                } catch (IOException e) {
+                    logger.error("Failed to read beeline password file " + pwdFilePath, e);
+                }
+            }
+        }
+        this.init(url, username, password);
+    }
+
+    private void init(String url, String username, String password) {
+        try {
+            Class.forName("org.apache.hive.jdbc.HiveDriver");
+            cnct = DriverManager.getConnection(url, username, password);
+            stmt = cnct.createStatement();
+            metaData = cnct.getMetaData();
+        } catch (SQLException | ClassNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private String stripQuotes(String input) {
+        if (input.startsWith("'") && input.endsWith("'")) {
+            return StringUtils.strip(input, "'");
+        } else if (input.startsWith("\"") && input.endsWith("\"")) {
+            return StringUtils.strip(input, "\"");
+        } else {
+            return input;
+        }
+    }
+
+    public List<String> getHiveDbNames() throws Exception {
+        List<String> ret = Lists.newArrayList();
+        ResultSet schemas = metaData.getSchemas();
+        while (schemas.next()) {
+            ret.add(String.valueOf(schemas.getObject(1)));
+        }
+        DBUtils.closeQuietly(schemas);
+        return ret;
+    }
+
+    public List<String> getHiveTableNames(String database) throws Exception {
+        List<String> ret = Lists.newArrayList();
+        ResultSet tables = metaData.getTables(null, database, null, null);
+        while (tables.next()) {
+            ret.add(String.valueOf(tables.getObject(3)));
+        }
+        DBUtils.closeQuietly(tables);
+        return ret;
+    }
+
+    @Override
+    public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
+        throw new UnsupportedOperationException();
+    }
+
+    public HiveTableMeta getHiveTableMeta(String database, String tableName) throws SQLException {
+        ResultSet columns = metaData.getColumns(null, database, tableName, null);
+        HiveTableMetaBuilder builder = new HiveTableMetaBuilder();
+        builder.setTableName(tableName);
+
+        List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+        while (columns.next()) {
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(columns.getString(4), columns.getString(6), columns.getString(12)));
+        }
+        builder.setAllColumns(allColumns);
+        DBUtils.closeQuietly(columns);
+        stmt.execute("use " + database);
+        ResultSet resultSet = stmt.executeQuery("describe formatted " + tableName);
+        extractHiveTableMeta(resultSet, builder);
+        DBUtils.closeQuietly(resultSet);
+        return builder.createHiveTableMeta();
+    }
+
+    private void extractHiveTableMeta(ResultSet resultSet, HiveTableMetaBuilder builder) throws SQLException {
+        while (resultSet.next()) {
+
+            List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+            if ("# Partition Information".equals(resultSet.getString(1).trim())) {
+                resultSet.next();
+                Preconditions.checkArgument("# col_name".equals(resultSet.getString(1).trim()));
+                resultSet.next();
+                Preconditions.checkArgument("".equals(resultSet.getString(1).trim()));
+                while (resultSet.next()) {
+                    if ("".equals(resultSet.getString(1).trim())) {
+                        break;
+                    }
+                    partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(resultSet.getString(1).trim(), resultSet.getString(2).trim(), resultSet.getString(3).trim()));
+                }
+                builder.setPartitionColumns(partitionColumns);
+            }
+
+            if ("Owner:".equals(resultSet.getString(1).trim())) {
+                builder.setOwner(resultSet.getString(2).trim());
+            }
+            if ("LastAccessTime:".equals(resultSet.getString(1).trim())) {
+                try {
+                    int i = Integer.parseInt(resultSet.getString(2).trim());
+                    builder.setLastAccessTime(i);
+                } catch (NumberFormatException e) {
+                    builder.setLastAccessTime(0);
+                }
+            }
+            if ("Location:".equals(resultSet.getString(1).trim())) {
+                builder.setSdLocation(resultSet.getString(2).trim());
+            }
+            if ("Table Type:".equals(resultSet.getString(1).trim())) {
+                builder.setTableType(resultSet.getString(2).trim());
+            }
+            if ("Table Parameters:".equals(resultSet.getString(1).trim())) {
+                while (resultSet.next()) {
+                    if (resultSet.getString(2) == null) {
+                        break;
+                    }
+                    if ("storage_handler".equals(resultSet.getString(2).trim())) {
+                        builder.setIsNative(false);//default is true
+                    }
+                    if ("totalSize".equals(resultSet.getString(2).trim())) {
+                        builder.setFileSize(Long.parseLong(resultSet.getString(3).trim())); // totalSize parameter, in bytes
+                    }
+                    if ("numFiles".equals(resultSet.getString(2).trim())) {
+                        builder.setFileNum(Long.parseLong(resultSet.getString(3).trim()));
+                    }
+                }
+            }
+            if ("InputFormat:".equals(resultSet.getString(1).trim())) {
+                builder.setSdInputFormat(resultSet.getString(2).trim());
+            }
+            if ("OutputFormat:".equals(resultSet.getString(1).trim())) {
+                builder.setSdOutputFormat(resultSet.getString(2).trim());
+            }
+        }
+    }
+
+    public void close() {
+        DBUtils.closeQuietly(stmt);
+        DBUtils.closeQuietly(cnct);
+    }
+
+    public static void main(String[] args) throws SQLException {
+
+        BeelineHiveClient loader = new BeelineHiveClient("-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://sandbox:10000'");
+        //BeelineHiveClient loader = new BeelineHiveClient(StringUtils.join(args, " "));
+        HiveTableMeta hiveTableMeta = loader.getHiveTableMeta("default", "test_kylin_fact_part");
+        System.out.println(hiveTableMeta);
+        loader.close();
+    }
+}
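
Callers are not expected to construct BeelineHiveClient directly: HiveClientFactory.getHiveClient(), introduced by this PR, chooses between the CLI and beeline implementations based on Kylin configuration. A hedged usage sketch; the table name is illustrative:

    import org.apache.kylin.source.hive.HiveClientFactory;
    import org.apache.kylin.source.hive.HiveTableMeta;
    import org.apache.kylin.source.hive.IHiveClient;

    public class HiveMetaSketch {
        public static void main(String[] args) throws Exception {
            IHiveClient client = HiveClientFactory.getHiveClient();   // CLI or beeline, per configuration
            HiveTableMeta meta = client.getHiveTableMeta("default", "kylin_sales"); // illustrative table
            System.out.println(meta);
        }
    }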
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java
new file mode 100644
index 0000000000..68cb3528ea
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/BeelineOptionsProcessor.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+public class BeelineOptionsProcessor {
+    private final Options options = new Options();
+
+    public BeelineOptionsProcessor() {
+
+        options.addOption(OptionBuilder.hasArg().withArgName("url").create('u'));
+        options.addOption(OptionBuilder.hasArg().withArgName("username").create('n'));
+        options.addOption(OptionBuilder.hasArg().withArgName("password").create('p'));
+
+    }
+
+    public CommandLine process(String[] argv) {
+        try {
+            return new GnuParser().parse(options, argv);
+
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+}
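
BeelineOptionsProcessor parses the configured beeline parameter string with commons-cli rather than by hand; note it recognizes only -u, -n, and -p, while BeelineHiveClient's own loop above additionally handles -w password files. A usage sketch with illustrative arguments:

    import org.apache.commons.cli.CommandLine;
    import org.apache.kylin.source.hive.BeelineOptionsProcessor;

    class OptionsSketch {
        public static void main(String[] args) {
            BeelineOptionsProcessor processor = new BeelineOptionsProcessor();
            CommandLine cmd = processor.process(
                    new String[] { "-u", "jdbc:hive2://host:10000", "-n", "user", "-p", "secret" });
            System.out.println(cmd.getOptionValue('u')); // jdbc:hive2://host:10000
        }
    }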
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
similarity index 59%
rename from source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
rename to source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
index a99b304499..60cf47aeab 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClient.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/CLIHiveClient.java
@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.common.StatsSetupConst;
@@ -35,61 +34,27 @@
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+import com.google.common.collect.Lists;
+
 /**
  * Hive meta API client for Kylin
  * @author shaoshi
  *
  */
-public class HiveClient {
+public class CLIHiveClient implements IHiveClient {
 
     protected HiveConf hiveConf = null;
     protected Driver driver = null;
     protected HiveMetaStoreClient metaStoreClient = null;
 
-    public HiveClient() {
-        hiveConf = new HiveConf(HiveClient.class);
-    }
-
-    public HiveClient(Map<String, String> configMap) {
-        this();
-        appendConfiguration(configMap);
-    }
-
-    public HiveConf getHiveConf() {
-        return hiveConf;
+    public CLIHiveClient() {
+        hiveConf = new HiveConf(CLIHiveClient.class);
     }
 
     /**
-     * Get the hive ql driver to execute ddl or dml
-     * @return
-     */
-    private Driver getDriver() {
-        if (driver == null) {
-            driver = new Driver(hiveConf);
-            SessionState.start(new CliSessionState(hiveConf));
-        }
-
-        return driver;
-    }
-
-    /**
-     * Append or overwrite the default hive client configuration; You need call this before invoke #executeHQL;
-     * @param configMap
-     */
-    public void appendConfiguration(Map<String, String> configMap) {
-        if (configMap != null && configMap.size() > 0) {
-            for (Entry<String, String> e : configMap.entrySet()) {
-                hiveConf.set(e.getKey(), e.getValue());
-            }
-        }
-    }
-
-    /**
-     * 
-     * @param hql
-     * @throws CommandNeedRetryException
-     * @throws IOException
+     * only used by DeployUtil
      */
+    @Override
     public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
         CommandProcessorResponse response = getDriver().run(hql);
         int retCode = response.getResponseCode();
@@ -99,47 +64,72 @@ public void executeHQL(String hql) throws CommandNeedRetryException, IOException
         }
     }
 
+    /**
+     * only used by DeployUtil
+     */
+    @Override
     public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
         for (String sql : hqls)
             executeHQL(sql);
     }
 
-    private HiveMetaStoreClient getMetaStoreClient() throws Exception {
-        if (metaStoreClient == null) {
-            metaStoreClient = new HiveMetaStoreClient(hiveConf);
-        }
-        return metaStoreClient;
-    }
-
-    public Table getHiveTable(String database, String tableName) throws Exception {
-        return getMetaStoreClient().getTable(database, tableName);
-    }
-
-    public List<FieldSchema> getHiveTableFields(String database, String tableName) throws Exception {
-        return getMetaStoreClient().getFields(database, tableName);
-    }
+    @Override
+    public HiveTableMeta getHiveTableMeta(String database, String tableName) throws Exception {
+        HiveTableMetaBuilder builder = new HiveTableMetaBuilder();
+        Table table = getMetaStoreClient().getTable(database, tableName);
 
-    public String getHiveTableLocation(String database, String tableName) throws Exception {
-        Table t = getHiveTable(database, tableName);
-        return t.getSd().getLocation();
-    }
+        List<FieldSchema> allFields = getMetaStoreClient().getFields(database, tableName);
+        List<FieldSchema> partitionFields = table.getPartitionKeys();
+        if (allFields == null) {
+            allFields = Lists.newArrayList();
+        }
+        if (partitionFields != null && partitionFields.size() > 0) {
+            allFields.addAll(partitionFields);
+        }
+        List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+        List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+        for (FieldSchema fieldSchema : allFields) {
+            allColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));
+        }
+        if (partitionFields != null && partitionFields.size() > 0) {
+            for (FieldSchema fieldSchema : partitionFields) {
+                partitionColumns.add(new HiveTableMeta.HiveTableColumnMeta(fieldSchema.getName(), fieldSchema.getType(), fieldSchema.getComment()));
+            }
+        }
+        builder.setAllColumns(allColumns);
+        builder.setPartitionColumns(partitionColumns);
 
-    public long getFileSizeForTable(Table table) {
-        return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE);
-    }
+        builder.setSdLocation(table.getSd().getLocation());
+        builder.setFileSize(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.TOTAL_SIZE));
+        builder.setFileNum(getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES));
+        builder.setIsNative(!MetaStoreUtils.isNonNativeTable(table));
+        builder.setTableName(tableName);
+        builder.setSdInputFormat(table.getSd().getInputFormat());
+        builder.setSdOutputFormat(table.getSd().getOutputFormat());
+        builder.setOwner(table.getOwner());
+        builder.setLastAccessTime(table.getLastAccessTime());
+        builder.setTableType(table.getTableType());
 
-    public long getFileNumberForTable(Table table) {
-        return getBasicStatForTable(new org.apache.hadoop.hive.ql.metadata.Table(table), StatsSetupConst.NUM_FILES);
+        return builder.createHiveTableMeta();
     }
 
+    @Override
     public List<String> getHiveDbNames() throws Exception {
         return getMetaStoreClient().getAllDatabases();
     }
 
+    @Override
     public List<String> getHiveTableNames(String database) throws Exception {
         return getMetaStoreClient().getAllTables(database);
     }
 
+    private HiveMetaStoreClient getMetaStoreClient() throws Exception {
+        if (metaStoreClient == null) {
+            metaStoreClient = new HiveMetaStoreClient(hiveConf);
+        }
+        return metaStoreClient;
+    }
+
     /**
      * COPIED FROM org.apache.hadoop.hive.ql.stats.StatsUtil for backward compatibility
      * 
@@ -150,7 +140,7 @@ public long getFileNumberForTable(Table table) {
      *          - type of stats
      * @return value of stats
      */
-    public static long getBasicStatForTable(org.apache.hadoop.hive.ql.metadata.Table table, String statType) {
+    private long getBasicStatForTable(org.apache.hadoop.hive.ql.metadata.Table table, String statType) {
         Map<String, String> params = table.getParameters();
         long result = 0;
 
@@ -164,7 +154,16 @@ public static long getBasicStatForTable(org.apache.hadoop.hive.ql.metadata.Table
         return result;
     }
 
-    public boolean isNativeTable(String database, String tableName) throws Exception {
-        return !MetaStoreUtils.isNonNativeTable(getMetaStoreClient().getTable(database, tableName));
+    /**
+     * Get the Hive QL driver to execute DDL or DML.
+     * @return the lazily initialized Driver
+     */
+    private Driver getDriver() {
+        if (driver == null) {
+            driver = new Driver(hiveConf);
+            SessionState.start(new CliSessionState(hiveConf));
+        }
+
+        return driver;
     }
 }
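
With the per-field getters gone, callers now obtain everything in one round trip; a minimal sketch of the new call path, assuming metastore connectivity and placeholder identifiers (HiveTableMeta is package-private, so such code lives in org.apache.kylin.source.hive):

    // Sketch only; "DEFAULT"/"KYLIN_SALES" are placeholder identifiers.
    IHiveClient client = HiveClientFactory.getHiveClient();
    HiveTableMeta meta = client.getHiveTableMeta("DEFAULT", "KYLIN_SALES");
    System.out.println("location=" + meta.sdLocation + ", files=" + meta.fileNum
            + ", size=" + meta.fileSize + ", native=" + meta.isNative);
    for (HiveTableMeta.HiveTableColumnMeta col : meta.allColumns) {
        System.out.println(col.name + " " + col.dataType); // partition cols included
    }
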
diff --git a/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
similarity index 57%
rename from core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java
rename to source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
index 9362e18cd6..8c883afaff 100644
--- a/core-job/src/test/java/org/apache/kylin/job/DiscardedTestExecutable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveClientFactory.java
@@ -6,36 +6,28 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *  
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *  
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
-*/
-
-package org.apache.kylin.job;
-
-import org.apache.kylin.job.exception.ExecuteException;
-import org.apache.kylin.job.execution.ExecutableContext;
-import org.apache.kylin.job.execution.ExecuteResult;
-
-/**
  */
-public class DiscardedTestExecutable extends BaseTestExecutable {
 
-    public DiscardedTestExecutable() {
-        super();
-    }
+package org.apache.kylin.source.hive;
+
+import org.apache.kylin.common.KylinConfig;
 
-    @Override
-    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException e) {
+public class HiveClientFactory {
+    public static IHiveClient getHiveClient() {
+        if ("cli".equals(KylinConfig.getInstanceFromEnv().getHiveClientMode())) {
+            return new CLIHiveClient();
+        } else if ("beeline".equals(KylinConfig.getInstanceFromEnv().getHiveClientMode())) {
+            return new BeelineHiveClient(KylinConfig.getInstanceFromEnv().getHiveBeelineParams());
+        } else {
+            throw new RuntimeException("cannot recognize hive client mode");
         }
-        return new ExecuteResult(ExecuteResult.State.DISCARDED, "discarded");
     }
 }
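
The mode string checked above comes from KylinConfig; assuming the property names that back these getters (kylin.hive.client and kylin.hive.beeline.params), the switch would look like this in kylin.properties, with a placeholder JDBC URL:

    ## 'cli' uses the in-process Hive driver, 'beeline' shells out over JDBC
    kylin.hive.client=beeline
    kylin.hive.beeline.params=-n root -u 'jdbc:hive2://localhost:10000'
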
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
index 5a5b4e0577..bce85b8fd3 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveCmdBuilder.java
@@ -80,7 +80,7 @@ public String build() {
 
                 logger.info("The statements to execute in beeline: \n" + hqlBuf);
                 if (logger.isDebugEnabled()) {
-                    logger.debug("THe SQL to execute in beeline: \n" + IOUtils.toString(new FileReader(tmpHql)));
+                    logger.debug("The SQL to execute in beeline: \n" + IOUtils.toString(new FileReader(tmpHql)));
                 }
             } catch (IOException e) {
                 throw new RuntimeException(e);
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
index f536cbbc19..c2a48162ab 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveMRInput.java
@@ -26,6 +26,7 @@
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
@@ -330,10 +331,22 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
             KylinConfig config = getCubeSpecificConfig();
 
             try {
-
                 computeRowCount(config.getCliCommandExecutor());
+                Path rowCountFile = null;
+                Path rowCountFolder = new Path(getRowCountOutputDir());
+                FileSystem fs = FileSystem.get(rowCountFolder.toUri(), HadoopUtil.getCurrentConfiguration());
+                for (FileStatus stat : fs.listStatus(rowCountFolder)) {
+                    if (!stat.isDirectory() && stat.getPath().getName().startsWith("0000")) {
+                        rowCountFile = stat.getPath();
+                        logger.debug("Found row count file " + rowCountFile);
+                        break;
+                    }
+                }
+
+                if (rowCountFile == null) {
+                    return new ExecuteResult(ExecuteResult.State.ERROR, "No row count file found in '" + getRowCountOutputDir() + "'");
+                }
 
-                Path rowCountFile = new Path(getRowCountOutputDir(), "000000_0");
                 long rowCount = readRowCountFromFile(rowCountFile);
                 if (!config.isEmptySegmentAllowed() && rowCount == 0) {
                     stepLogger.log("Detect upstream hive table is empty, " + "fail the job because \"kylin.job.allow.empty.segment\" = \"false\"");
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSource.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSource.java
index e9cebea2f4..af0a5197e6 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSource.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSource.java
@@ -18,13 +18,18 @@
 
 package org.apache.kylin.source.hive;
 
-import com.google.common.collect.Lists;
+import java.util.List;
+
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.IMRInput;
+import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.ReadableTable;
 
-import java.util.List;
+import com.google.common.collect.Lists;
+import org.apache.kylin.source.SourcePartition;
 
 //used by reflection
 public class HiveSource implements ISource {
@@ -49,4 +54,27 @@ public ReadableTable createReadableTable(TableDesc tableDesc) {
         return Lists.newArrayList();
     }
 
+    @Override
+    public SourcePartition parsePartitionBeforeBuild(IBuildable buildable, SourcePartition srcPartition) {
+        SourcePartition result = SourcePartition.getCopyOf(srcPartition);
+        CubeInstance cube = (CubeInstance) buildable;
+        if (cube.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
+            // normal partitioned cube
+            if (result.getStartDate() == 0) {
+                final CubeSegment last = cube.getLastSegment();
+                if (last != null) {
+                    result.setStartDate(last.getDateRangeEnd());
+                }
+            }
+        } else {
+            // full build
+            result.setStartDate(0);
+            result.setEndDate(Long.MAX_VALUE);
+        }
+
+        result.setStartOffset(0);
+        result.setEndOffset(0);
+        return result;
+    }
+
 }
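
The contract in short: a partitioned model with an unset start date (0) continues from the last segment's end, keeping segments contiguous, while an unpartitioned model is always rebuilt over the full range; offsets are zeroed because Hive builds are date-driven. A caller-side sketch, assuming a no-arg SourcePartition constructor alongside the bean accessors used above, and some existing CubeInstance cube:

    // Sketch only; the constructor and date value are assumptions.
    SourcePartition request = new SourcePartition();
    request.setStartDate(0);             // "unknown" -> continue from last segment
    request.setEndDate(1480000000000L);  // invented end date (epoch millis)
    SourcePartition adjusted = new HiveSource().parsePartitionBeforeBuild(cube, request);
    // For a partitioned cube with segments, adjusted.getStartDate() now equals
    // cube.getLastSegment().getDateRangeEnd(); both offsets are reset to 0.
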
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
index ec9aedb808..401e72046d 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveSourceTableLoader.java
@@ -25,8 +25,6 @@
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.mr.HadoopUtil;
@@ -62,7 +60,7 @@
             db2tables.put(parts[0], parts[1]);
         }
 
-        HiveClient hiveClient = new HiveClient();
+        IHiveClient hiveClient = HiveClientFactory.getHiveClient();
         SchemaChecker checker = new SchemaChecker(hiveClient, MetadataManager.getInstance(config), CubeManager.getInstance(config));
         for (Map.Entry<String, String> entry : db2tables.entries()) {
             SchemaChecker.CheckResult result = checker.allowReload(entry.getKey(), entry.getValue());
@@ -85,29 +83,18 @@ public static void unLoadHiveTable(String hiveTable) throws IOException {
         metaMgr.removeTableExd(hiveTable);
     }
 
-    private static List<String> extractHiveTables(String database, Set<String> tables, HiveClient hiveClient) throws IOException {
+    private static List<String> extractHiveTables(String database, Set<String> tables, IHiveClient hiveClient) throws IOException {
 
         List<String> loadedTables = Lists.newArrayList();
         MetadataManager metaMgr = MetadataManager.getInstance(KylinConfig.getInstanceFromEnv());
         for (String tableName : tables) {
-            Table table = null;
-            List<FieldSchema> partitionFields = null;
-            List<FieldSchema> fields = null;
+            HiveTableMeta hiveTableMeta;
             try {
-                table = hiveClient.getHiveTable(database, tableName);
-                partitionFields = table.getPartitionKeys();
-                fields = hiveClient.getHiveTableFields(database, tableName);
+                hiveTableMeta = hiveClient.getHiveTableMeta(database, tableName);
             } catch (Exception e) {
-                e.printStackTrace();
-                throw new IOException(e);
+                throw new RuntimeException("cannot get HiveTableMeta", e);
             }
 
-            if (fields != null && partitionFields != null && partitionFields.size() > 0) {
-                fields.addAll(partitionFields);
-            }
-
-            long tableSize = hiveClient.getFileSizeForTable(table);
-            long tableFileNum = hiveClient.getFileNumberForTable(table);
             TableDesc tableDesc = metaMgr.getTableDesc(database + "." + tableName);
             if (tableDesc == null) {
                 tableDesc = new TableDesc();
@@ -116,33 +103,33 @@ public static void unLoadHiveTable(String hiveTable) throws IOException {
                 tableDesc.setUuid(UUID.randomUUID().toString());
                 tableDesc.setLastModified(0);
             }
-            if (table.getTableType() != null) {
-                tableDesc.setTableType(table.getTableType());
+            if (hiveTableMeta.tableType != null) {
+                tableDesc.setTableType(hiveTableMeta.tableType);
             }
 
-            int columnNumber = fields.size();
+            int columnNumber = hiveTableMeta.allColumns.size();
             List<ColumnDesc> columns = new ArrayList<ColumnDesc>(columnNumber);
             for (int i = 0; i < columnNumber; i++) {
-                FieldSchema field = fields.get(i);
+                HiveTableMeta.HiveTableColumnMeta field = hiveTableMeta.allColumns.get(i);
                 ColumnDesc cdesc = new ColumnDesc();
-                cdesc.setName(field.getName().toUpperCase());
+                cdesc.setName(field.name.toUpperCase());
                 // use "double" in kylin for "float"
-                if ("float".equalsIgnoreCase(field.getType())) {
+                if ("float".equalsIgnoreCase(field.dataType)) {
                     cdesc.setDatatype("double");
                 } else {
-                    cdesc.setDatatype(field.getType());
+                    cdesc.setDatatype(field.dataType);
                 }
                 cdesc.setId(String.valueOf(i + 1));
-                cdesc.setComment(field.getComment());
+                cdesc.setComment(field.comment);
                 columns.add(cdesc);
             }
             tableDesc.setColumns(columns.toArray(new ColumnDesc[columnNumber]));
 
             StringBuffer partitionColumnString = new StringBuffer();
-            for (int i = 0, n = partitionFields.size(); i < n; i++) {
+            for (int i = 0, n = hiveTableMeta.partitionColumns.size(); i < n; i++) {
                 if (i > 0)
                     partitionColumnString.append(", ");
-                partitionColumnString.append(partitionFields.get(i).getName().toUpperCase());
+                partitionColumnString.append(hiveTableMeta.partitionColumns.get(i).name.toUpperCase());
             }
 
             Map<String, String> map = metaMgr.getTableDescExd(tableDesc.getIdentity());
@@ -150,16 +137,16 @@ public static void unLoadHiveTable(String hiveTable) throws IOException {
             if (map == null) {
                 map = Maps.newHashMap();
             }
-            map.put(MetadataConstants.TABLE_EXD_TABLENAME, table.getTableName());
-            map.put(MetadataConstants.TABLE_EXD_LOCATION, table.getSd().getLocation());
-            map.put(MetadataConstants.TABLE_EXD_IF, table.getSd().getInputFormat());
-            map.put(MetadataConstants.TABLE_EXD_OF, table.getSd().getOutputFormat());
-            map.put(MetadataConstants.TABLE_EXD_OWNER, table.getOwner());
-            map.put(MetadataConstants.TABLE_EXD_LAT, String.valueOf(table.getLastAccessTime()));
+            map.put(MetadataConstants.TABLE_EXD_TABLENAME, hiveTableMeta.tableName);
+            map.put(MetadataConstants.TABLE_EXD_LOCATION, hiveTableMeta.sdLocation);
+            map.put(MetadataConstants.TABLE_EXD_IF, hiveTableMeta.sdInputFormat);
+            map.put(MetadataConstants.TABLE_EXD_OF, hiveTableMeta.sdOutputFormat);
+            map.put(MetadataConstants.TABLE_EXD_OWNER, hiveTableMeta.owner);
+            map.put(MetadataConstants.TABLE_EXD_LAT, String.valueOf(hiveTableMeta.lastAccessTime));
             map.put(MetadataConstants.TABLE_EXD_PC, partitionColumnString.toString());
-            map.put(MetadataConstants.TABLE_EXD_TFS, String.valueOf(tableSize));
-            map.put(MetadataConstants.TABLE_EXD_TNF, String.valueOf(tableFileNum));
-            map.put(MetadataConstants.TABLE_EXD_PARTITIONED, Boolean.valueOf(partitionFields != null && partitionFields.size() > 0).toString());
+            map.put(MetadataConstants.TABLE_EXD_TFS, String.valueOf(hiveTableMeta.fileSize));
+            map.put(MetadataConstants.TABLE_EXD_TNF, String.valueOf(hiveTableMeta.fileNum));
+            map.put(MetadataConstants.TABLE_EXD_PARTITIONED, Boolean.valueOf(hiveTableMeta.partitionColumns.size() > 0).toString());
 
             metaMgr.saveSourceTable(tableDesc);
             metaMgr.saveTableExd(tableDesc.getIdentity(), map);
@@ -168,4 +155,5 @@ public static void unLoadHiveTable(String hiveTable) throws IOException {
 
         return loadedTables;
     }
+
 }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
index dcc43ff39d..97e999045f 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTable.java
@@ -37,11 +37,17 @@
     final private String database;
     final private String hiveTable;
 
-    private HiveClient hiveClient;
+    private IHiveClient hiveClient;
+    private HiveTableMeta hiveTableMeta;
 
     public HiveTable(TableDesc tableDesc) {
         this.database = tableDesc.getDatabase();
         this.hiveTable = tableDesc.getName();
+        try {
+            this.hiveTableMeta = getHiveClient().getHiveTableMeta(database, hiveTable);
+        } catch (Exception e) {
+            throw new RuntimeException("cannot get HiveTableMeta", e);
+        }
     }
 
     @Override
@@ -58,7 +64,7 @@ public TableSignature getSignature() throws IOException {
             long lastModified = sizeAndLastModified.getSecond();
 
             // for non-native hive table, cannot rely on size & last modified on HDFS
-            if (getHiveClient().isNativeTable(database, hiveTable) == false) {
+            if (!this.hiveTableMeta.isNative) {
                 lastModified = System.currentTimeMillis(); // assume table is ever changing
             }
 
@@ -80,13 +86,13 @@ private String computeHDFSLocation() throws Exception {
             return override;
         }
 
-        return getHiveClient().getHiveTableLocation(database, hiveTable);
+        return this.hiveTableMeta.sdLocation;
     }
 
-    public HiveClient getHiveClient() {
+    public IHiveClient getHiveClient() {
 
         if (hiveClient == null) {
-            hiveClient = new HiveClient();
+            hiveClient = HiveClientFactory.getHiveClient();
         }
         return hiveClient;
     }
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
new file mode 100644
index 0000000000..784a0bbc2b
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMeta.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.util.List;
+
+class HiveTableMeta {
+    static class HiveTableColumnMeta {
+        String name;
+        String dataType;
+        String comment;
+
+        public HiveTableColumnMeta(String name, String dataType, String comment) {
+            this.name = name;
+            this.dataType = dataType;
+            this.comment = comment;
+        }
+
+        @Override
+        public String toString() {
+            return "HiveTableColumnMeta{" + "name='" + name + '\'' + ", dataType='" + dataType + '\'' + ", comment='" + comment + '\'' + '}';
+        }
+    }
+
+    String tableName;
+    String sdLocation; // "sd" is short for storage descriptor
+    String sdInputFormat;
+    String sdOutputFormat;
+    String owner;
+    String tableType;
+    int lastAccessTime;
+    long fileSize;
+    long fileNum;
+    boolean isNative;
+    List<HiveTableColumnMeta> allColumns;
+    List<HiveTableColumnMeta> partitionColumns;
+
+    public HiveTableMeta(String tableName, String sdLocation, String sdInputFormat, String sdOutputFormat, String owner, String tableType, int lastAccessTime, long fileSize, long fileNum, boolean isNative, List<HiveTableColumnMeta> allColumns, List<HiveTableColumnMeta> partitionColumns) {
+        this.tableName = tableName;
+        this.sdLocation = sdLocation;
+        this.sdInputFormat = sdInputFormat;
+        this.sdOutputFormat = sdOutputFormat;
+        this.owner = owner;
+        this.tableType = tableType;
+        this.lastAccessTime = lastAccessTime;
+        this.fileSize = fileSize;
+        this.fileNum = fileNum;
+        this.isNative = isNative;
+        this.allColumns = allColumns;
+        this.partitionColumns = partitionColumns;
+    }
+
+    @Override
+    public String toString() {
+        return "HiveTableMeta{" + "tableName='" + tableName + '\'' + ", sdLocation='" + sdLocation + '\'' + ", sdInputFormat='" + sdInputFormat + '\'' + ", sdOutputFormat='" + sdOutputFormat + '\'' + ", owner='" + owner + '\'' + ", tableType='" + tableType + '\'' + ", lastAccessTime=" + lastAccessTime + ", fileSize=" + fileSize + ", fileNum=" + fileNum + ", isNative=" + isNative + ", allColumns=" + allColumns + ", partitionColumns=" + partitionColumns + '}';
+    }
+}
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
new file mode 100644
index 0000000000..7a3e5d6e5c
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/HiveTableMetaBuilder.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import java.util.List;
+
+import com.google.common.collect.Lists;
+
+public class HiveTableMetaBuilder {
+    private String tableName;
+    private String sdLocation;
+    private String sdInputFormat;
+    private String sdOutputFormat;
+    private String owner;
+    private String tableType;
+    private int lastAccessTime;
+    private long fileSize;
+    private long fileNum;
+    private boolean isNative = true;
+    private List<HiveTableMeta.HiveTableColumnMeta> allColumns = Lists.newArrayList();
+    private List<HiveTableMeta.HiveTableColumnMeta> partitionColumns = Lists.newArrayList();
+
+    public HiveTableMetaBuilder setTableName(String tableName) {
+        this.tableName = tableName;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdLocation(String sdLocation) {
+        this.sdLocation = sdLocation;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdInputFormat(String sdInputFormat) {
+        this.sdInputFormat = sdInputFormat;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setSdOutputFormat(String sdOutputFormat) {
+        this.sdOutputFormat = sdOutputFormat;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setOwner(String owner) {
+        this.owner = owner;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setTableType(String tableType) {
+        this.tableType = tableType;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setLastAccessTime(int lastAccessTime) {
+        this.lastAccessTime = lastAccessTime;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setFileSize(long fileSize) {
+        this.fileSize = fileSize;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setFileNum(long fileNum) {
+        this.fileNum = fileNum;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setIsNative(boolean isNative) {
+        this.isNative = isNative;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setAllColumns(List<HiveTableMeta.HiveTableColumnMeta> allColumns) {
+        this.allColumns = allColumns;
+        return this;
+    }
+
+    public HiveTableMetaBuilder setPartitionColumns(List<HiveTableMeta.HiveTableColumnMeta> partitionColumns) {
+        this.partitionColumns = partitionColumns;
+        return this;
+    }
+
+    public HiveTableMeta createHiveTableMeta() {
+        return new HiveTableMeta(tableName, sdLocation, sdInputFormat, sdOutputFormat, owner, tableType, lastAccessTime, fileSize, fileNum, isNative, allColumns, partitionColumns);
+    }
+}
\ No newline at end of file
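
A quick sketch of the builder in use; the values are placeholders, and any setter left uncalled keeps the defaults declared above (isNative=true, empty column lists, zeroed counters):

    HiveTableMeta meta = new HiveTableMetaBuilder()
            .setTableName("KYLIN_SALES")                      // placeholder name
            .setSdLocation("hdfs:///warehouse/kylin_sales")   // placeholder path
            .setOwner("admin")
            .setFileSize(1024L)
            .setFileNum(1L)
            .createHiveTableMeta();
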
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java b/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java
deleted file mode 100644
index 79493a4ef7..0000000000
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/HqlExecutable.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.source.hive;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.job.exception.ExecuteException;
-import org.apache.kylin.job.execution.AbstractExecutable;
-import org.apache.kylin.job.execution.ExecutableContext;
-import org.apache.kylin.job.execution.ExecuteResult;
-import org.datanucleus.store.types.backed.HashMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.common.collect.Lists;
-
-/**
- */
-public class HqlExecutable extends AbstractExecutable {
-
-    private static final Logger logger = LoggerFactory.getLogger(HqlExecutable.class);
-
-    private static final String HQL = "hql";
-    private static final String HIVE_CONFIG = "hive-config";
-
-    public HqlExecutable() {
-        super();
-    }
-
-    @Override
-    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        try {
-            Map<String, String> configMap = getConfiguration();
-            HiveClient hiveClient = new HiveClient(configMap);
-
-            for (String hql : getHqls()) {
-                hiveClient.executeHQL(hql);
-            }
-            return new ExecuteResult(ExecuteResult.State.SUCCEED);
-        } catch (Exception e) {
-            logger.error("error run hive query:" + getHqls(), e);
-            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-        }
-    }
-
-    public void setConfiguration(Map<String, String> configMap) {
-        if (configMap != null) {
-            String configStr = "";
-            try {
-                configStr = JsonUtil.writeValueAsString(configMap);
-            } catch (JsonProcessingException e) {
-                e.printStackTrace();
-            }
-            setParam(HIVE_CONFIG, configStr);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    private Map<String, String> getConfiguration() {
-        String configStr = getParam(HIVE_CONFIG);
-        Map<String, String> result = null;
-        if (configStr != null) {
-            try {
-                result = JsonUtil.readValue(configStr, HashMap.class);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-
-        return result;
-    }
-
-    public void setHqls(List<String> hqls) {
-        setParam(HQL, StringUtils.join(hqls, ";"));
-    }
-
-    private List<String> getHqls() {
-        final String hqls = getParam(HQL);
-        if (hqls != null) {
-            return Lists.newArrayList(StringUtils.split(hqls, ";"));
-        } else {
-            return Collections.emptyList();
-        }
-    }
-
-}
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
new file mode 100644
index 0000000000..f218cce2f1
--- /dev/null
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/IHiveClient.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+
+import java.io.IOException;
+import java.util.List;
+
+public interface IHiveClient {
+    void executeHQL(String hql) throws CommandNeedRetryException, IOException;
+
+    void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException;
+
+    HiveTableMeta getHiveTableMeta(String database, String tableName) throws Exception;
+
+    List<String> getHiveDbNames() throws Exception;
+
+    List<String> getHiveTableNames(String database) throws Exception;
+}
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java b/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
index 319ebee5c5..87a88701e2 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/SchemaChecker.java
@@ -27,8 +27,6 @@
 
 import javax.annotation.Nullable;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.model.CubeDesc;
@@ -46,7 +44,7 @@
 import com.google.common.collect.Sets;
 
 public class SchemaChecker {
-    private final HiveClient hiveClient;
+    private final IHiveClient hiveClient;
     private final MetadataManager metadataManager;
     private final CubeManager cubeManager;
 
@@ -87,23 +85,16 @@ static CheckResult invalidOnIncompatibleSchema(String tableName, List<String> re
         }
     }
 
-    SchemaChecker(HiveClient hiveClient, MetadataManager metadataManager, CubeManager cubeManager) {
+    SchemaChecker(IHiveClient hiveClient, MetadataManager metadataManager, CubeManager cubeManager) {
         this.hiveClient = checkNotNull(hiveClient, "hiveClient is null");
         this.metadataManager = checkNotNull(metadataManager, "metadataManager is null");
         this.cubeManager = checkNotNull(cubeManager, "cubeManager is null");
     }
 
-    private List<FieldSchema> fetchSchema(String dbName, String tblName) throws Exception {
-        List<FieldSchema> fields = Lists.newArrayList();
-        fields.addAll(hiveClient.getHiveTableFields(dbName, tblName));
-
-        Table table = hiveClient.getHiveTable(dbName, tblName);
-        List<FieldSchema> partitionFields = table.getPartitionKeys();
-        if (partitionFields != null) {
-            fields.addAll(partitionFields);
-        }
-
-        return fields;
+    private List<HiveTableMeta.HiveTableColumnMeta> fetchSchema(String dbName, String tblName) throws Exception {
+        List<HiveTableMeta.HiveTableColumnMeta> columnMetas = Lists.newArrayList();
+        columnMetas.addAll(hiveClient.getHiveTableMeta(dbName, tblName).allColumns);
+        return columnMetas;
     }
 
     private List<CubeInstance> findCubeByTable(final String fullTableName) {
@@ -128,12 +119,12 @@ public boolean apply(@Nullable CubeInstance cube) {
         return ImmutableList.copyOf(relatedCubes);
     }
 
-    private boolean isColumnCompatible(ColumnDesc column, FieldSchema field) {
-        if (!column.getName().equalsIgnoreCase(field.getName())) {
+    private boolean isColumnCompatible(ColumnDesc column, HiveTableMeta.HiveTableColumnMeta field) {
+        if (!column.getName().equalsIgnoreCase(field.name)) {
             return false;
         }
 
-        String typeStr = field.getType();
+        String typeStr = field.dataType;
         // kylin uses double internally for float, see HiveSourceTableLoader.java
         // TODO should this normalization to be in DataType class ?
         if ("float".equalsIgnoreCase(typeStr)) {
@@ -159,7 +150,7 @@ private boolean isColumnCompatible(ColumnDesc column, FieldSchema field) {
      * @param fieldsMap current hive schema of `table`
      * @return true if all columns used in `cube` has compatible schema with `fieldsMap`, false otherwise
      */
-    private List<String> checkAllColumnsInCube(CubeInstance cube, TableDesc table, Map<String, FieldSchema> fieldsMap) {
+    private List<String> checkAllColumnsInCube(CubeInstance cube, TableDesc table, Map<String, HiveTableMeta.HiveTableColumnMeta> fieldsMap) {
         Set<ColumnDesc> usedColumns = Sets.newHashSet();
         for (TblColRef col : cube.getAllColumns()) {
             usedColumns.add(col.getColumnDesc());
@@ -168,7 +159,7 @@ private boolean isColumnCompatible(ColumnDesc column, FieldSchema field) {
         List<String> violateColumns = Lists.newArrayList();
         for (ColumnDesc column : table.getColumns()) {
             if (usedColumns.contains(column)) {
-                FieldSchema field = fieldsMap.get(column.getName());
+                HiveTableMeta.HiveTableColumnMeta field = fieldsMap.get(column.getName());
                 if (field == null || !isColumnCompatible(column, field)) {
                     violateColumns.add(column.getName());
                 }
@@ -184,7 +175,7 @@ private boolean isColumnCompatible(ColumnDesc column, FieldSchema field) {
      * @param fields current table metadata in hive
      * @return true if only new columns are appended in hive, false otherwise
      */
-    private boolean checkAllColumnsInTableDesc(TableDesc table, List<FieldSchema> fields) {
+    private boolean checkAllColumnsInTableDesc(TableDesc table, List<HiveTableMeta.HiveTableColumnMeta> fields) {
         if (table.getColumnCount() > fields.size()) {
             return false;
         }
@@ -206,15 +197,15 @@ public CheckResult allowReload(String dbName, String tblName) {
             return CheckResult.validOnFirstLoad(fullTableName);
         }
 
-        List<FieldSchema> currentFields;
-        Map<String, FieldSchema> currentFieldsMap = Maps.newHashMap();
+        List<HiveTableMeta.HiveTableColumnMeta> currentFields;
+        Map<String, HiveTableMeta.HiveTableColumnMeta> currentFieldsMap = Maps.newHashMap();
         try {
             currentFields = fetchSchema(dbName, tblName);
         } catch (Exception e) {
             return CheckResult.invalidOnFetchSchema(fullTableName, e);
         }
-        for (FieldSchema field : currentFields) {
-            currentFieldsMap.put(field.getName().toUpperCase(), field);
+        for (HiveTableMeta.HiveTableColumnMeta field : currentFields) {
+            currentFieldsMap.put(field.name.toUpperCase(), field);
         }
 
         List<String> issues = Lists.newArrayList();
diff --git a/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java b/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java
new file mode 100644
index 0000000000..84da0a2631
--- /dev/null
+++ b/source-hive/src/test/java/org/apache/kylin/source/hive/BeelineOptionsProcessorTest.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.source.hive;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.lang.StringUtils;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class BeelineOptionsProcessorTest {
+    @Ignore
+    @Test
+    public void foo() {
+        String param = "-n root --hiveconf hive.security.authorization.sqlstd.confwhitelist.append='mapreduce.job.*|dfs.*' -u 'jdbc:hive2://localhost:10000'";
+        BeelineOptionsProcessor processor = new BeelineOptionsProcessor();
+        CommandLine commandLine = processor.process(StringUtils.split(param));
+        String n = commandLine.getOptionValue('n');
+        String u = commandLine.getOptionValue('u');
+        String p = commandLine.getOptionValue('p');
+
+    }
+}
diff --git a/source-kafka/pom.xml b/source-kafka/pom.xml
index e2100c4d02..efd3750451 100644
--- a/source-kafka/pom.xml
+++ b/source-kafka/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-source-kafka</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
index fb2a949863..8695276796 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaMRInput.java
@@ -19,8 +19,12 @@
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.List;
 
+import javax.annotation.Nullable;
+
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -30,26 +34,33 @@
 import org.apache.kylin.common.util.StreamingMessage;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.model.CubeJoinedFlatTableDesc;
+import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.engine.mr.IMRInput;
 import org.apache.kylin.engine.mr.JobBuilderSupport;
 import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.engine.mr.common.MapReduceExecutable;
 import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
 import org.apache.kylin.job.JoinedFlatTable;
+import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
+import org.apache.kylin.job.execution.ExecutableContext;
+import org.apache.kylin.job.execution.ExecuteResult;
+import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
 import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.apache.kylin.source.kafka.hadoop.KafkaFlatTableJob;
 import org.apache.kylin.source.kafka.job.MergeOffsetStep;
-import org.apache.kylin.source.kafka.job.SeekOffsetStep;
-import org.apache.kylin.source.kafka.job.UpdateTimeRangeStep;
 
+import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class KafkaMRInput implements IMRInput {
 
@@ -57,7 +68,7 @@
 
     @Override
     public IMRBatchCubingInputSide getBatchCubingInputSide(IJoinedFlatTableDesc flatDesc) {
-        this.cubeSegment = (CubeSegment) flatDesc.getSegment();
+        this.cubeSegment = (CubeSegment)flatDesc.getSegment();
         return new BatchCubingInputSide(cubeSegment);
     }
 
@@ -65,8 +76,14 @@ public IMRBatchCubingInputSide getBatchCubingInputSide(IJoinedFlatTableDesc flat
     public IMRTableInputFormat getTableInputFormat(TableDesc table) {
         KafkaConfigManager kafkaConfigManager = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv());
         KafkaConfig kafkaConfig = kafkaConfigManager.getKafkaConfig(table.getIdentity());
-        TableRef tableRef = cubeSegment.getCubeInstance().getDataModelDesc().findTable(table.getIdentity());
-        List<TblColRef> columns = Lists.newArrayList(tableRef.getColumns());
+        List<TblColRef> columns = Lists.transform(Arrays.asList(table.getColumns()), new Function<ColumnDesc, TblColRef>() {
+            @Nullable
+            @Override
+            public TblColRef apply(ColumnDesc input) {
+                return input.getRef();
+            }
+        });
+
         return new KafkaTableInputFormat(cubeSegment, columns, kafkaConfig, null);
     }
 
@@ -77,15 +94,11 @@ public IMRBatchMergeInputSide getBatchMergeInputSide(ISegment seg) {
 
     public static class KafkaTableInputFormat implements IMRTableInputFormat {
         private final CubeSegment cubeSegment;
-        private List<TblColRef> columns;
         private StreamingParser streamingParser;
-        private KafkaConfig kafkaConfig;
         private final JobEngineConfig conf;
 
         public KafkaTableInputFormat(CubeSegment cubeSegment, List<TblColRef> columns, KafkaConfig kafkaConfig, JobEngineConfig conf) {
             this.cubeSegment = cubeSegment;
-            this.columns = columns;
-            this.kafkaConfig = kafkaConfig;
             this.conf = conf;
             try {
                 streamingParser = StreamingParser.getStreamingParser(kafkaConfig.getParserName(), kafkaConfig.getParserProperties(), columns);
@@ -131,21 +144,9 @@ public BatchCubingInputSide(CubeSegment seg) {
 
         @Override
         public void addStepPhase1_CreateFlatTable(DefaultChainedExecutable jobFlow) {
-            jobFlow.addTask(createUpdateSegmentOffsetStep(jobFlow.getId()));
             jobFlow.addTask(createSaveKafkaDataStep(jobFlow.getId()));
         }
 
-        public SeekOffsetStep createUpdateSegmentOffsetStep(String jobId) {
-            final SeekOffsetStep result = new SeekOffsetStep();
-            result.setName("Seek and update offset step");
-
-            CubingExecutableUtil.setCubeName(seg.getRealization().getName(), result.getParams());
-            CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
-            CubingExecutableUtil.setCubingJobId(jobId, result.getParams());
-
-            return result;
-        }
-
         private MapReduceExecutable createSaveKafkaDataStep(String jobId) {
             MapReduceExecutable result = new MapReduceExecutable();
 
@@ -167,14 +168,10 @@ private MapReduceExecutable createSaveKafkaDataStep(String jobId) {
 
         @Override
         public void addStepPhase4_Cleanup(DefaultChainedExecutable jobFlow) {
-            final UpdateTimeRangeStep result = new UpdateTimeRangeStep();
-            result.setName("Update Segment Time Range");
-            CubingExecutableUtil.setCubeName(seg.getRealization().getName(), result.getParams());
-            CubingExecutableUtil.setSegmentId(seg.getUuid(), result.getParams());
-            CubingExecutableUtil.setCubingJobId(jobFlow.getId(), result.getParams());
-            JobBuilderSupport jobBuilderSupport = new JobBuilderSupport(seg, "SYSTEM");
-            result.getParams().put(BatchConstants.CFG_OUTPUT_PATH, jobBuilderSupport.getFactDistinctColumnsPath(jobFlow.getId()));
-            jobFlow.addTask(result);
+            GarbageCollectionStep step = new GarbageCollectionStep();
+            step.setName(ExecutableConstants.STEP_NAME_KAFKA_CLEANUP);
+            step.setDataPath(outputPath);
+            jobFlow.addTask(step);
 
         }
 
@@ -211,4 +208,36 @@ public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
         }
     }
 
+    public static class GarbageCollectionStep extends AbstractExecutable {
+        private static final Logger logger = LoggerFactory.getLogger(GarbageCollectionStep.class);
+
+        @Override
+        protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
+            try {
+                rmdirOnHDFS(getDataPath());
+            } catch (IOException e) {
+                logger.error("job:" + getId() + " execute finished with exception", e);
+                return new ExecuteResult(ExecuteResult.State.ERROR, e.getMessage());
+            }
+
+            return new ExecuteResult(ExecuteResult.State.SUCCEED, "HDFS path " + getDataPath() + " has been dropped.\n");
+        }
+
+        private void rmdirOnHDFS(String path) throws IOException {
+            Path externalDataPath = new Path(path);
+            FileSystem fs = FileSystem.get(externalDataPath.toUri(), HadoopUtil.getCurrentConfiguration());
+            if (fs.exists(externalDataPath)) {
+                fs.delete(externalDataPath, true);
+            }
+        }
+
+        public void setDataPath(String externalDataPath) {
+            setParam("dataPath", externalDataPath);
+        }
+
+        private String getDataPath() {
+            return getParam("dataPath");
+        }
+
+    }
 }
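
One subtlety worth noting in the new getTableInputFormat(): Guava's Lists.transform() returns a lazy view, so the function is re-applied on every access. Here the list is handed straight to KafkaTableInputFormat, which is fine; if it were iterated repeatedly, a defensive copy would avoid the repeated work, e.g.:

    // Optional hardening (not part of this patch): materialize the lazy view.
    List<TblColRef> columns = Lists.newArrayList(
            Lists.transform(Arrays.asList(table.getColumns()),
                    new Function<ColumnDesc, TblColRef>() {
                        @Override
                        public TblColRef apply(ColumnDesc input) {
                            return input.getRef();
                        }
                    }));
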
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
index 208c0ce015..b0c8e7f525 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/KafkaSource.java
@@ -18,19 +18,33 @@
 
 package org.apache.kylin.source.kafka;
 
-import com.google.common.collect.Lists;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.PartitionInfo;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.IMRInput;
-import org.apache.kylin.metadata.streaming.StreamingConfig;
+import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.streaming.StreamingConfig;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.ReadableTable;
+import org.apache.kylin.source.SourcePartition;
 import org.apache.kylin.source.kafka.config.KafkaConfig;
+import org.apache.kylin.source.kafka.util.KafkaClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.util.List;
+import com.google.common.collect.Lists;
 
 //used by reflection
 public class KafkaSource implements ISource {
 
+    private static final Logger logger = LoggerFactory.getLogger(KafkaSource.class);
+
     @SuppressWarnings("unchecked")
     @Override
     public <I> I adaptToBuildEngine(Class<I> engineInterface) {
@@ -54,4 +68,121 @@ public ReadableTable createReadableTable(TableDesc tableDesc) {
         return dependentResources;
     }
 
+    @Override
+    public SourcePartition parsePartitionBeforeBuild(IBuildable buildable, SourcePartition srcPartition) {
+        checkSourceOffsets(srcPartition);
+        final SourcePartition result = SourcePartition.getCopyOf(srcPartition);
+        final CubeInstance cube = (CubeInstance) buildable;
+        if (result.getStartOffset() == 0) {
+            final CubeSegment last = cube.getLastSegment();
+            if (last != null) {
+                logger.debug("Last segment exists, continue from last segment " + last.getName() + "'s end position: " + last.getSourcePartitionOffsetEnd());
+                // from last seg's end position
+                result.setSourcePartitionOffsetStart(last.getSourcePartitionOffsetEnd());
+            } else if (cube.getDescriptor().getPartitionOffsetStart() != null && cube.getDescriptor().getPartitionOffsetStart().size() > 0) {
+                logger.debug("Last segment doesn't exist, use the start offset that be initiated previously: " + cube.getDescriptor().getPartitionOffsetStart());
+                result.setSourcePartitionOffsetStart(cube.getDescriptor().getPartitionOffsetStart());
+            } else {
+                // from the topic's very beginning
+                logger.debug("Last segment doesn't exist and no start offset was initiated; will seek from the topic's very beginning.");
+                result.setSourcePartitionOffsetStart(KafkaClient.getEarliestOffsets(cube));
+            }
+        }
+
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cube.getFactTable());
+        final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
+        final String topic = kafkaConfig.getTopic();
+        try (final KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, cube.getName(), null)) {
+            final List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
+            for (PartitionInfo partitionInfo : partitionInfos) {
+                if (!result.getSourcePartitionOffsetStart().containsKey(partitionInfo.partition())) {
+                    // a new partition was added since the last build
+                    logger.debug("New partition added since last build");
+                    long earliest = KafkaClient.getEarliestOffset(consumer, topic, partitionInfo.partition());
+                    logger.debug("new partition " + partitionInfo.partition() + " starts from " + earliest);
+                    result.getSourcePartitionOffsetStart().put(partitionInfo.partition(), earliest);
+                }
+            }
+        }
+
+        if (result.getEndOffset() == Long.MAX_VALUE) {
+            logger.debug("Seek end offsets from topic");
+            Map<Integer, Long> latestOffsets = KafkaClient.getCurrentOffsets(cube);
+            logger.debug("The end offsets are " + latestOffsets);
+
+            for (Integer partitionId : latestOffsets.keySet()) {
+                if (result.getSourcePartitionOffsetStart().containsKey(partitionId)) {
+                    if (result.getSourcePartitionOffsetStart().get(partitionId) > latestOffsets.get(partitionId)) {
+                        throw new IllegalArgumentException("Partition " + partitionId + " end offset (" + latestOffsets.get(partitionId) + ") is smaller than start offset ( " + result.getSourcePartitionOffsetStart().get(partitionId) + ")");
+                    }
+                } else {
+                    throw new IllegalStateException("New partition added in between, retry.");
+                }
+            }
+            result.setSourcePartitionOffsetEnd(latestOffsets);
+        }
+
+        long totalStartOffset = 0, totalEndOffset = 0;
+        for (Long v : result.getSourcePartitionOffsetStart().values()) {
+            totalStartOffset += v;
+        }
+        for (Long v : result.getSourcePartitionOffsetEnd().values()) {
+            totalEndOffset += v;
+        }
+
+        if (totalStartOffset > totalEndOffset) {
+            throw new IllegalArgumentException("Illegal offset: start: " + totalStartOffset + ", end: " + totalEndOffset);
+        }
+
+        if (totalStartOffset == totalEndOffset) {
+            throw new IllegalArgumentException("No new message comes, startOffset = endOffset:" + totalStartOffset);
+        }
+
+        result.setStartOffset(totalStartOffset);
+        result.setEndOffset(totalEndOffset);
+
+        logger.debug("parsePartitionBeforeBuild() return: " + result);
+        return result;
+    }
+
+    private void checkSourceOffsets(SourcePartition srcPartition) {
+        long startOffset = srcPartition.getStartOffset();
+        long endOffset = srcPartition.getEndOffset();
+        final Map<Integer, Long> sourcePartitionOffsetStart = srcPartition.getSourcePartitionOffsetStart();
+        final Map<Integer, Long> sourcePartitionOffsetEnd = srcPartition.getSourcePartitionOffsetEnd();
+        if (endOffset <= 0 || startOffset >= endOffset) {
+            throw new IllegalArgumentException("'startOffset' need be smaller than 'endOffset'");
+        }
+
+        if (startOffset > 0) {
+            if (sourcePartitionOffsetStart == null || sourcePartitionOffsetStart.size() == 0) {
+                throw new IllegalArgumentException("When 'startOffset' is > 0, need provide each partition's start offset");
+            }
+
+            long totalOffset = 0;
+            for (Long v : sourcePartitionOffsetStart.values()) {
+                totalOffset += v;
+            }
+
+            if (totalOffset != startOffset) {
+                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetStart', doesn't match with 'startOffset'");
+            }
+        }
+
+        if (endOffset > 0 && endOffset != Long.MAX_VALUE) {
+            if (sourcePartitionOffsetEnd == null || sourcePartitionOffsetEnd.size() == 0) {
+                throw new IllegalArgumentException("When 'endOffset' is not Long.MAX_VALUE, need provide each partition's start offset");
+            }
+
+            long totalOffset = 0;
+            for (Long v : sourcePartitionOffsetEnd.values()) {
+                totalOffset += v;
+            }
+
+            if (totalOffset != endOffset) {
+                throw new IllegalArgumentException("Invalid 'sourcePartitionOffsetEnd', doesn't match with 'endOffset'");
+            }
+        }
+    }
+
 }
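
For reference, the contract that checkSourceOffsets() above enforces is that the
per-partition maps must sum exactly to the scalar start/end offsets. A minimal
sketch of a valid request (assuming SourcePartition has a no-arg constructor;
the setters are the ones parsePartitionBeforeBuild() itself uses):

    Map<Integer, Long> start = Maps.newHashMap();
    start.put(0, 100L); // partition 0 starts at offset 100
    start.put(1, 200L); // partition 1 starts at offset 200
    Map<Integer, Long> end = Maps.newHashMap();
    end.put(0, 150L);
    end.put(1, 250L);

    SourcePartition sp = new SourcePartition();
    sp.setStartOffset(300L); // must equal 100 + 200, the sum of 'start'
    sp.setEndOffset(400L);   // must equal 150 + 250, the sum of 'end'
    sp.setSourcePartitionOffsetStart(start);
    sp.setSourcePartitionOffsetEnd(end);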
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
index c538acb297..157d83cb2f 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConfig.java
@@ -61,6 +61,7 @@
     @JsonProperty("parserName")
     private String parserName;
 
+    @Deprecated
     @JsonProperty("margin")
     private long margin;
 
@@ -120,10 +121,12 @@ public void setName(String name) {
         this.name = name;
     }
 
+    @Deprecated
     public long getMargin() {
         return margin;
     }
 
+    @Deprecated
     public void setMargin(long margin) {
         this.margin = margin;
     }
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
index e20b20a214..5fe6e00df7 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaFlatTableJob.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.source.kafka.hadoop;
 
+import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.source.kafka.util.KafkaClient;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.fs.Path;
@@ -97,6 +98,8 @@ public int run(String[] args) throws Exception {
                 throw new IllegalArgumentException("Invalid Kafka information, brokers " + brokers + ", topic " + topic);
             }
 
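+            // load Kylin's Hadoop job overrides first; the explicit settings below still take precedence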
+            JobEngineConfig jobEngineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
+            job.getConfiguration().addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
             job.getConfiguration().set(CONFIG_KAFKA_BROKERS, brokers);
             job.getConfiguration().set(CONFIG_KAFKA_TOPIC, topic);
             job.getConfiguration().set(CONFIG_KAFKA_TIMEOUT, String.valueOf(kafkaConfig.getTimeout()));
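
The two added lines make the Kafka flat-table job honor the same Hadoop job-level
overrides as Kylin's other MR steps. Hadoop's Configuration applies resources in
the order they are added, and a later explicit set() wins over values from any
resource; a minimal sketch of that layering (the file path is illustrative only):

    Configuration conf = new Configuration();
    conf.addResource(new Path("/etc/kylin/kylin_job_conf.xml")); // bulk overrides from file
    conf.set("mapreduce.job.queuename", "kylin");                // explicit set() takes precedence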
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
index 81f6bac089..fe0e2cca5c 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/hadoop/KafkaInputFormat.java
@@ -23,9 +23,6 @@
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-import org.apache.kylin.source.kafka.util.KafkaClient;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -36,6 +33,10 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.PartitionInfo;
+import org.apache.kylin.source.kafka.util.KafkaClient;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
 
 /**
  * Convert Kafka topic to Hadoop InputFormat
@@ -45,16 +46,16 @@
 
     @Override
     public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
-        Configuration conf = context.getConfiguration();
+        final Configuration conf = context.getConfiguration();
 
-        String brokers = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_BROKERS);
-        String inputTopic = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TOPIC);
-        String consumerGroup = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_CONSUMER_GROUP);
-        Integer partitionMin = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MIN));
-        Integer partitionMax = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MAX));
+        final String brokers = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_BROKERS);
+        final String inputTopic = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_TOPIC);
+        final String consumerGroup = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_CONSUMER_GROUP);
+        final Integer partitionMin = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MIN));
+        final Integer partitionMax = Integer.valueOf(conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MAX));
 
-        Map<Integer, Long> startOffsetMap = Maps.newHashMap();
-        Map<Integer, Long> endOffsetMap = Maps.newHashMap();
+        final Map<Integer, Long> startOffsetMap = Maps.newHashMap();
+        final Map<Integer, Long> endOffsetMap = Maps.newHashMap();
         for (int i = partitionMin; i <= partitionMax; i++) {
             String start = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_START + i);
             String end = conf.get(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_END + i);
@@ -64,23 +65,19 @@
             }
         }
 
-        List<InputSplit> splits = new ArrayList<InputSplit>();
+        final List<InputSplit> splits = new ArrayList<InputSplit>();
         try (KafkaConsumer<String, String> consumer = KafkaClient.getKafkaConsumer(brokers, consumerGroup, null)) {
-            List<PartitionInfo> partitionInfos = consumer.partitionsFor(inputTopic);
+            final List<PartitionInfo> partitionInfos = consumer.partitionsFor(inputTopic);
             Preconditions.checkArgument(partitionInfos.size() == startOffsetMap.size(), "partition number mismatch with server side");
             for (int i = 0; i < partitionInfos.size(); i++) {
-                PartitionInfo partition = partitionInfos.get(i);
+                final PartitionInfo partition = partitionInfos.get(i);
                 int partitionId = partition.partition();
                 if (startOffsetMap.containsKey(partitionId) == false) {
                     throw new IllegalStateException("Partition '" + partitionId + "' not exists.");
                 }
 
-                if (endOffsetMap.get(partitionId) >  startOffsetMap.get(partitionId)) {
-                    InputSplit split = new KafkaInputSplit(
-                            brokers, inputTopic,
-                            partitionId,
-                            startOffsetMap.get(partitionId), endOffsetMap.get(partitionId)
-                    );
+                if (endOffsetMap.get(partitionId) > startOffsetMap.get(partitionId)) {
+                    InputSplit split = new KafkaInputSplit(brokers, inputTopic, partitionId, startOffsetMap.get(partitionId), endOffsetMap.get(partitionId));
                     splits.add(split);
                 }
             }
@@ -89,9 +86,7 @@
     }
 
     @Override
-    public RecordReader<LongWritable, BytesWritable> createRecordReader(
-            InputSplit arg0, TaskAttemptContext arg1) throws IOException,
-            InterruptedException {
+    public RecordReader<LongWritable, BytesWritable> createRecordReader(InputSplit arg0, TaskAttemptContext arg1) throws IOException, InterruptedException {
         return new KafkaInputRecordReader();
     }
 
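getSplits() above expects one start/end offset pair per partition in the job
configuration, keyed by partition id. A hypothetical driver-side counterpart
(mirroring how KafkaFlatTableJob populates these keys; startOffsets/endOffsets
stand for the per-partition maps of the segment being built):

    for (Map.Entry<Integer, Long> e : startOffsets.entrySet()) {
        int p = e.getKey();
        conf.set(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_START + p, e.getValue().toString());
        conf.set(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_END + p, endOffsets.get(p).toString());
    }
    conf.set(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MIN, "0");
    conf.set(KafkaFlatTableJob.CONFIG_KAFKA_PARITION_MAX, String.valueOf(startOffsets.size() - 1));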
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/SeekOffsetStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/SeekOffsetStep.java
index 98d6e4db7a..acaa7518fd 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/SeekOffsetStep.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/SeekOffsetStep.java
@@ -17,28 +17,15 @@
  */
 package org.apache.kylin.source.kafka.job;
 
-import org.apache.kylin.source.kafka.KafkaConfigManager;
-import org.apache.kylin.source.kafka.util.KafkaClient;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.PartitionInfo;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
-import org.apache.kylin.source.kafka.config.KafkaConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
 /**
+ * Deprecated, not in use.
  */
 public class SeekOffsetStep extends AbstractExecutable {
 
@@ -50,97 +37,8 @@ public SeekOffsetStep() {
 
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
-        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
-        final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
-
-        Map<Integer, Long> startOffsets = segment.getSourcePartitionOffsetStart();
-        Map<Integer, Long> endOffsets = segment.getSourcePartitionOffsetEnd();
-
-        if (startOffsets.size() > 0 && endOffsets.size() > 0 && startOffsets.size() == endOffsets.size()) {
-            return new ExecuteResult(ExecuteResult.State.SUCCEED, "skipped, as the offset is provided.");
-        }
-
-        final Map<Integer, Long> cubeDescStart = cube.getDescriptor().getPartitionOffsetStart();
-        if (cube.getSegments().size() == 1 &&  cubeDescStart != null && cubeDescStart.size() > 0) {
-            logger.info("This is the first segment, and has initiated the start offsets, will use it");
-            startOffsets = cubeDescStart;
-        }
-
-        final KafkaConfig kafakaConfig = KafkaConfigManager.getInstance(context.getConfig()).getKafkaConfig(cube.getFactTable());
-        final String brokers = KafkaClient.getKafkaBrokers(kafakaConfig);
-        final String topic = kafakaConfig.getTopic();
-        try (final KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, cube.getName(), null)) {
-            final List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
-
-            if (startOffsets.isEmpty()) {
-                // user didn't specify start offset, use the biggest offset in existing segments as start
-                for (CubeSegment seg : cube.getSegments()) {
-                    Map<Integer, Long> segEndOffset = seg.getSourcePartitionOffsetEnd();
-                    for (PartitionInfo partition : partitionInfos) {
-                        int partitionId = partition.partition();
-                        if (segEndOffset.containsKey(partitionId)) {
-                            startOffsets.put(partitionId, Math.max(startOffsets.containsKey(partitionId) ? startOffsets.get(partitionId) : 0, segEndOffset.get(partitionId)));
-                        }
-                    }
-                }
-                logger.info("Get start offset for segment " + segment.getName() + ": " + startOffsets.toString());
-            }
-
-            if (partitionInfos.size() > startOffsets.size()) {
-                // has new partition added
-                for (int x = startOffsets.size(); x < partitionInfos.size(); x++) {
-                    long earliest = KafkaClient.getEarliestOffset(consumer, topic, partitionInfos.get(x).partition());
-                    startOffsets.put(partitionInfos.get(x).partition(), earliest);
-                }
-            }
-
-            if (endOffsets.isEmpty()) {
-                // user didn't specify end offset, use latest offset in kafka
-                for (PartitionInfo partitionInfo : partitionInfos) {
-                    long latest = KafkaClient.getLatestOffset(consumer, topic, partitionInfo.partition());
-                    endOffsets.put(partitionInfo.partition(), latest);
-                }
-
-                logger.info("Get end offset for segment " + segment.getName() + ": " + endOffsets.toString());
-            }
-        }
-
-        long totalStartOffset = 0, totalEndOffset = 0;
-        for (Long v : startOffsets.values()) {
-            totalStartOffset += v;
-        }
-        for (Long v : endOffsets.values()) {
-            totalEndOffset += v;
-        }
-
-        if (totalEndOffset > totalStartOffset) {
-            segment.setSourceOffsetStart(totalStartOffset);
-            segment.setSourceOffsetEnd(totalEndOffset);
-            segment.setSourcePartitionOffsetStart(startOffsets);
-            segment.setSourcePartitionOffsetEnd(endOffsets);
-            segment.setName(CubeSegment.makeSegmentName(0, 0, totalStartOffset, totalEndOffset));
-            CubeUpdate cubeBuilder = new CubeUpdate(cube);
-            cubeBuilder.setToUpdateSegs(segment);
-            try {
-                cubeManager.updateCube(cubeBuilder);
-            } catch (IOException e) {
-                return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-            }
-            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed, offset start: " + totalStartOffset + ", offset end: " + totalEndOffset + ", message count: " + (totalEndOffset - totalStartOffset));
-        } else {
-            CubeUpdate cubeBuilder = new CubeUpdate(cube);
-            cubeBuilder.setToRemoveSegs(segment);
-            try {
-                cubeManager.updateCube(cubeBuilder);
-            } catch (IOException e) {
-                return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-            }
-
-            return new ExecuteResult(ExecuteResult.State.DISCARDED, "No new message comes");
-        }
-
 
+        return new ExecuteResult(ExecuteResult.State.SUCCEED, "No in use");
     }
 
 }
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/UpdateTimeRangeStep.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/UpdateTimeRangeStep.java
index d19aa635fb..8c31c70e2a 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/UpdateTimeRangeStep.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/job/UpdateTimeRangeStep.java
@@ -17,34 +17,15 @@
  */
 package org.apache.kylin.source.kafka.job;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.time.FastDateFormat;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.kylin.common.util.DateFormat;
-import org.apache.kylin.cube.CubeInstance;
-import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.cube.CubeUpdate;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
-import org.apache.kylin.metadata.datatype.DataType;
-import org.apache.kylin.metadata.model.TblColRef;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
+ * Deprecated, not in use.
  */
 public class UpdateTimeRangeStep extends AbstractExecutable {
 
@@ -56,62 +37,7 @@ public UpdateTimeRangeStep() {
 
     @Override
     protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
-        final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
-        final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
-        final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
-        final TblColRef partitionCol = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateColumnRef();
-        final String outputPath = this.getParams().get(BatchConstants.CFG_OUTPUT_PATH);
-        final Path outputFile = new Path(outputPath, partitionCol.getName());
-
-        String minValue = null, maxValue = null, currentValue = null;
-        FSDataInputStream inputStream = null;
-        BufferedReader bufferedReader = null;
-        try {
-            FileSystem fs = HadoopUtil.getFileSystem(outputPath);
-            inputStream = fs.open(outputFile);
-            bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
-            minValue = currentValue = bufferedReader.readLine();
-            while (currentValue != null) {
-                maxValue = currentValue;
-                currentValue = bufferedReader.readLine();
-            }
-        } catch (IOException e) {
-            logger.error("fail to read file " + outputFile, e);
-            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-        } finally {
-            IOUtils.closeQuietly(bufferedReader);
-            IOUtils.closeQuietly(inputStream);
-        }
-
-        final DataType partitionColType = partitionCol.getType();
-        FastDateFormat dateFormat;
-        if (partitionColType.isDate()) {
-            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATE_PATTERN);
-        } else if (partitionColType.isDatetime() || partitionColType.isTimestamp()) {
-            dateFormat = DateFormat.getDateFormat(DateFormat.DEFAULT_DATETIME_PATTERN_WITHOUT_MILLISECONDS);
-        } else if (partitionColType.isStringFamily()) {
-            String partitionDateFormat = segment.getCubeDesc().getModel().getPartitionDesc().getPartitionDateFormat();
-            if (StringUtils.isEmpty(partitionDateFormat)) {
-                partitionDateFormat = DateFormat.DEFAULT_DATE_PATTERN;
-            }
-            dateFormat = DateFormat.getDateFormat(partitionDateFormat);
-        } else {
-            return new ExecuteResult(ExecuteResult.State.ERROR, "Type " + partitionColType + " is not valid partition column type");
-        }
-
-        try {
-            long startTime = dateFormat.parse(minValue).getTime();
-            long endTime = dateFormat.parse(maxValue).getTime();
-            CubeUpdate cubeBuilder = new CubeUpdate(cube);
-            segment.setDateRangeStart(startTime);
-            segment.setDateRangeEnd(endTime);
-            cubeBuilder.setToUpdateSegs(segment);
-            cubeManager.updateCube(cubeBuilder);
-            return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
-        } catch (Exception e) {
-            logger.error("fail to update cube segment offset", e);
-            return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
-        }
+        return new ExecuteResult(ExecuteResult.State.SUCCEED, "succeed");
     }
 
 }
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
index 685af6acca..446c076b5e 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaClient.java
@@ -22,6 +22,7 @@
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.common.PartitionInfo;
 import org.apache.kafka.common.TopicPartition;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.source.kafka.KafkaConfigManager;
 import org.apache.kylin.source.kafka.config.BrokerConfig;
@@ -59,7 +60,7 @@ private static Properties constructDefaultKafkaProducerProperties(String brokers
         props.put("retries", 0);
         props.put("batch.size", 16384);
         props.put("linger.ms", 50);
-        props.put("timeout.ms", "30000");
+        props.put("request.timeout.ms", "30000");
         if (properties != null) {
             for (Map.Entry entry : properties.entrySet()) {
                 props.put(entry.getKey(), entry.getValue());
@@ -75,12 +76,12 @@ private static Properties constructDefaultKafkaConsumerProperties(String brokers
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("group.id", consumerGroup);
         props.put("session.timeout.ms", "30000");
-        props.put("enable.auto.commit", "false");
         if (properties != null) {
             for (Map.Entry entry : properties.entrySet()) {
                 props.put(entry.getKey(), entry.getValue());
             }
         }
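+        // set after merging user properties so they cannot re-enable auto commit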
+        props.put("enable.auto.commit", "false");
         return props;
     }
 
@@ -117,7 +118,25 @@ public static long getLatestOffset(KafkaConsumer consumer, String topic, int par
     }
 
     public static Map<Integer, Long> getCurrentOffsets(final CubeInstance cubeInstance) {
-        final KafkaConfig kafakaConfig = KafkaConfigManager.getInstance(cubeInstance.getConfig()).getKafkaConfig(cubeInstance.getFactTable());
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cubeInstance.getFactTable());
+
+        final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
+        final String topic = kafkaConfig.getTopic();
+
+        Map<Integer, Long> latestOffsets = Maps.newHashMap();
+        try (final KafkaConsumer<String, String> consumer = KafkaClient.getKafkaConsumer(brokers, cubeInstance.getName(), null)) {
+            final List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
+            for (PartitionInfo partitionInfo : partitionInfos) {
+                long latest = getLatestOffset(consumer, topic, partitionInfo.partition());
+                latestOffsets.put(partitionInfo.partition(), latest);
+            }
+        }
+        return latestOffsets;
+    }
+
+
+    public static Map<Integer, Long> getEarliestOffsets(final CubeInstance cubeInstance) {
+        final KafkaConfig kafkaConfig = KafkaConfigManager.getInstance(KylinConfig.getInstanceFromEnv()).getKafkaConfig(cubeInstance.getFactTable());

-        final String brokers = KafkaClient.getKafkaBrokers(kafakaConfig);
-        final String topic = kafakaConfig.getTopic();
+        final String brokers = KafkaClient.getKafkaBrokers(kafkaConfig);
+        final String topic = kafkaConfig.getTopic();
@@ -126,7 +145,7 @@ public static long getLatestOffset(KafkaConsumer consumer, String topic, int par
         try (final KafkaConsumer consumer = KafkaClient.getKafkaConsumer(brokers, cubeInstance.getName(), null)) {
             final List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic);
             for (PartitionInfo partitionInfo : partitionInfos) {
-                long latest = KafkaClient.getLatestOffset(consumer, topic, partitionInfo.partition());
-                startOffsets.put(partitionInfo.partition(), latest);
+                long earliest = getEarliestOffset(consumer, topic, partitionInfo.partition());
+                startOffsets.put(partitionInfo.partition(), earliest);
             }
         }
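
With this pair of helpers in place, a caller can bracket a build range for a
streaming cube in two calls (a sketch; cube is any CubeInstance backed by a
Kafka source):

    Map<Integer, Long> earliest = KafkaClient.getEarliestOffsets(cube); // per-partition lower bounds
    Map<Integer, Long> latest = KafkaClient.getCurrentOffsets(cube);    // per-partition upper bounds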
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
index 3d26d3d782..b8f98aa263 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
@@ -26,6 +26,7 @@
 import java.util.Map;
 import java.util.Properties;
 import java.util.Random;
+import java.util.UUID;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
@@ -48,7 +49,6 @@
     @SuppressWarnings("static-access")
     private static final Option OPTION_TOPIC = OptionBuilder.withArgName("topic").hasArg().isRequired(true).withDescription("Kafka topic").create("topic");
     private static final Option OPTION_BROKER = OptionBuilder.withArgName("broker").hasArg().isRequired(true).withDescription("Kafka broker").create("broker");
-    private static final Option OPTION_DELAY = OptionBuilder.withArgName("delay").hasArg().isRequired(false).withDescription("Simulated message delay in mili-seconds, default 0").create("delay");
-    private static final Option OPTION_INTERVAL = OptionBuilder.withArgName("interval").hasArg().isRequired(false).withDescription("Simulated message interval in mili-seconds, default 1000").create("interval");
+    private static final Option OPTION_INTERVAL = OptionBuilder.withArgName("interval").hasArg().isRequired(false).withDescription("Simulated message interval in milliseconds, default 10").create("interval");
 
     private static final ObjectMapper mapper = new ObjectMapper();
@@ -60,21 +60,14 @@ public static void main(String[] args) throws Exception {
         String topic, broker;
         options.addOption(OPTION_TOPIC);
         options.addOption(OPTION_BROKER);
-        options.addOption(OPTION_DELAY);
         options.addOption(OPTION_INTERVAL);
         optionsHelper.parseOptions(options, args);
 
         logger.info("options: '" + optionsHelper.getOptionsAsString() + "'");
 
         topic = optionsHelper.getOptionValue(OPTION_TOPIC);
         broker = optionsHelper.getOptionValue(OPTION_BROKER);
-        long delay = 0;
-        String delayString = optionsHelper.getOptionValue(OPTION_DELAY);
-        if (delayString != null) {
-            delay = Long.parseLong(delayString);
-        }
 
-        long interval = 1000;
+        long interval = 10;
         String intervalString = optionsHelper.getOptionValue(OPTION_INTERVAL);
         if (intervalString != null) {
             interval = Long.parseLong(intervalString);
@@ -101,6 +94,10 @@ public static void main(String[] args) throws Exception {
         devices.add("Andriod");
         devices.add("Other");
 
+        List<String> genders = new ArrayList<>();
+        genders.add("Male");
+        genders.add("Female");
+
         Properties props = new Properties();
         props.put("bootstrap.servers", broker);
         props.put("acks", "all");
@@ -117,15 +114,23 @@ public static void main(String[] args) throws Exception {
         Random rnd = new Random();
         Map<String, Object> record = new HashMap();
         while (alive == true) {
-            record.put("order_time", (new Date().getTime() - delay));
+            // add a normal record
+            record.put("order_time", (new Date().getTime()));
             record.put("country", countries.get(rnd.nextInt(countries.size())));
             record.put("category", category.get(rnd.nextInt(category.size())));
             record.put("device", devices.get(rnd.nextInt(devices.size())));
             record.put("qty", rnd.nextInt(10));
             record.put("currency", "USD");
             record.put("amount", rnd.nextDouble() * 100);
-            ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
-            System.out.println("Sending 1 message");
+            // add an embedded user record
+            Map<String, Object> user = new HashMap<>();
+            user.put("id", UUID.randomUUID().toString());
+            user.put("gender", genders.get(rnd.nextInt(genders.size())));
+            user.put("age", rnd.nextInt(20) + 10);
+            record.put("user", user);
+            // send the message
+            ProducerRecord<String, String> data = new ProducerRecord<>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
+            System.out.println("Sending 1 message: " + record.toString());
             producer.send(data);
             Thread.sleep(interval);
         }
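
With the embedded user object, each message now carries a nested record; a
value serialized by the loop above looks roughly like this (field values
illustrative only):

    {"order_time":1480000000000,"country":"...","category":"...","device":"Other",
     "qty":4,"currency":"USD","amount":72.5,
     "user":{"id":"3f8ed1e2-...","gender":"Male","age":23}}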
diff --git a/storage-hbase/pom.xml b/storage-hbase/pom.xml
index 23e72390a2..ffe03fceb8 100644
--- a/storage-hbase/pom.xml
+++ b/storage-hbase/pom.xml
@@ -17,8 +17,7 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <artifactId>kylin-storage-hbase</artifactId>
@@ -28,7 +27,7 @@
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
 
     </parent>
 
@@ -113,8 +112,7 @@
                             <shadedArtifactAttached>true</shadedArtifactAttached>
                             <shadedClassifierName>coprocessor</shadedClassifierName>
                             <transformers>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
                             </transformers>
                             <artifactSet>
                                 <includes>
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index cbf81b6d17..b7693912cf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -40,9 +40,9 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.kylin.common.KylinConfig;
@@ -64,7 +64,7 @@
     private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
 
     private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
-    private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
+    private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
     private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
 
     private static ExecutorService coprocessorPool = null;
@@ -75,7 +75,7 @@
             public void run() {
                 closeCoprocessorPool();
 
-                for (HConnection conn : connPool.values()) {
+                for (Connection conn : connPool.values()) {
                     try {
                         conn.close();
                     } catch (IOException e) {
@@ -144,7 +144,7 @@ private static Configuration newHBaseConfiguration(String url) {
         // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
         if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
             throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-        
+
         Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
         addHBaseClusterNNHAConfiguration(conf);
 
@@ -213,9 +213,9 @@ public static String makeQualifiedPathInHBaseCluster(String path) {
 
     // ============================================================================
 
-    // returned HConnection can be shared by multiple threads and does not require close()
+    // returned Connection can be shared by multiple threads and does not require close()
     @SuppressWarnings("resource")
-    public static HConnection get(String url) {
+    public static Connection get(String url) {
         // find configuration
         Configuration conf = configCache.get(url);
         if (conf == null) {
@@ -223,13 +223,13 @@ public static HConnection get(String url) {
             configCache.put(url, conf);
         }
 
-        HConnection connection = connPool.get(url);
+        Connection connection = connPool.get(url);
         try {
             while (true) {
-                // I don't use DCL since recreate a connection is not a big issue.
+                // I don't use DCL since recreating a connection is not a big issue.
                 if (connection == null || connection.isClosed()) {
                     logger.info("connection is null or closed, creating a new one");
-                    connection = HConnectionManager.createConnection(conf);
+                    connection = ConnectionFactory.createConnection(conf);
                     connPool.put(url, connection);
                 }
 
@@ -248,8 +248,8 @@ public static HConnection get(String url) {
         return connection;
     }
 
-    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
+    public static boolean tableExists(Connection conn, String tableName) throws IOException {
+        Admin hbase = conn.getAdmin();
         try {
             return hbase.tableExists(TableName.valueOf(tableName));
         } finally {
@@ -269,18 +269,18 @@ public static void deleteTable(String hbaseUrl, String tableName) throws IOExcep
         deleteTable(HBaseConnection.get(hbaseUrl), tableName);
     }
 
-    public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
-
+    public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
+        Admin hbase = conn.getAdmin();
+        TableName tableName = TableName.valueOf(table);
         try {
             if (tableExists(conn, table)) {
                 logger.debug("HTable '" + table + "' already exists");
-                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
+                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
                 boolean wait = false;
                 for (String family : families) {
                     if (existingFamilies.contains(family) == false) {
                         logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
-                        hbase.addColumn(table, newFamilyDescriptor(family));
+                        hbase.addColumn(tableName, newFamilyDescriptor(family));
-                        // addColumn() is async, is there a way to wait it finish?
+                        // addColumn() is async; is there a way to wait for it to finish?
                         wait = true;
                     }
@@ -333,8 +333,8 @@ private static HColumnDescriptor newFamilyDescriptor(String family) {
         return fd;
     }
 
-    public static void deleteTable(HConnection conn, String tableName) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
+    public static void deleteTable(Connection conn, String tableName) throws IOException {
+        Admin hbase = conn.getAdmin();
 
         try {
             if (!tableExists(conn, tableName)) {
@@ -344,10 +344,10 @@ public static void deleteTable(HConnection conn, String tableName) throws IOExce
 
             logger.debug("delete HTable '" + tableName + "'");
 
-            if (hbase.isTableEnabled(tableName)) {
-                hbase.disableTable(tableName);
+            if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
+                hbase.disableTable(TableName.valueOf(tableName));
             }
-            hbase.deleteTable(tableName);
+            hbase.deleteTable(TableName.valueOf(tableName));
 
             logger.debug("HTable '" + tableName + "' deleted");
         } finally {
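
The change running through this file is the move from the deprecated
HConnection/HBaseAdmin pair to the HBase 1.x Connection/Admin API. The calling
pattern the module now follows looks like this (table name illustrative):

    Connection conn = HBaseConnection.get("hbase"); // shared and long-lived, do not close
    try (Admin admin = conn.getAdmin()) {           // Admin is per-use, always close it
        TableName table = TableName.valueOf("KYLIN_SAMPLE");
        if (admin.tableExists(table) && admin.isTableEnabled(table)) {
            admin.disableTable(table);
        }
    }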
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 3fd6426648..169510a6fa 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -31,14 +31,15 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +70,7 @@
     final String tableNameBase;
     final String hbaseUrl;
 
-    HConnection getConnection() throws IOException {
+    Connection getConnection() throws IOException {
         return HBaseConnection.get(hbaseUrl);
     }
 
@@ -120,7 +121,7 @@ private void visitFolder(String folderPath, Filter filter, FolderVisitor visitor
         byte[] endRow = Bytes.toBytes(lookForPrefix);
         endRow[endRow.length - 1]++;
 
-        HTableInterface table = getConnection().getTable(getAllInOneTableName());
+        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         Scan scan = new Scan(startRow, endRow);
         if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
             scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -238,13 +239,12 @@ protected void putResourceImpl(String resPath, InputStream content, long ts) thr
         IOUtils.copy(content, bout);
         bout.close();
 
-        HTableInterface table = getConnection().getTable(getAllInOneTableName());
+        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             byte[] row = Bytes.toBytes(resPath);
             Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
 
             table.put(put);
-            table.flushCommits();
         } finally {
             IOUtils.closeQuietly(table);
         }
@@ -252,7 +252,7 @@ protected void putResourceImpl(String resPath, InputStream content, long ts) thr
 
     @Override
     protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
-        HTableInterface table = getConnection().getTable(getAllInOneTableName());
+        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             byte[] row = Bytes.toBytes(resPath);
             byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -265,8 +265,6 @@ protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldT
                 throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
-            table.flushCommits();
-
             return newTS;
         } finally {
             IOUtils.closeQuietly(table);
@@ -275,7 +273,7 @@ protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldT
 
     @Override
     protected void deleteResourceImpl(String resPath) throws IOException {
-        HTableInterface table = getConnection().getTable(getAllInOneTableName());
+        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             boolean hdfsResourceExist = false;
             Result result = internalGetFromHTable(table, resPath, true, false);
@@ -288,7 +286,6 @@ protected void deleteResourceImpl(String resPath) throws IOException {
 
             Delete del = new Delete(Bytes.toBytes(resPath));
             table.delete(del);
-            table.flushCommits();
 
             if (hdfsResourceExist) { // remove hdfs cell value
                 Path redirectPath = bigCellHDFSPath(resPath);
@@ -310,7 +307,7 @@ protected String getReadableResourcePathImpl(String resPath) {
     }
 
     private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
-        HTableInterface table = getConnection().getTable(getAllInOneTableName());
+        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
         try {
             return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
         } finally {
@@ -318,7 +315,7 @@ private Result getFromHTable(String path, boolean fetchContent, boolean fetchTim
         }
     }
 
-    private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -337,7 +334,7 @@ private Result internalGetFromHTable(HTableInterface table, String path, boolean
         return exists ? result : null;
     }
 
-    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
+    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
         Path redirectPath = bigCellHDFSPath(resPath);
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         FileSystem fileSystem = FileSystem.get(hconf);
@@ -363,7 +360,7 @@ public Path bigCellHDFSPath(String resPath) {
         return redirectPath;
     }
 
-    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
+    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
         int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
@@ -371,8 +368,8 @@ private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTable
         }
 
         Put put = new Put(row);
-        put.add(B_FAMILY, B_COLUMN, content);
-        put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+        put.addColumn(B_FAMILY, B_COLUMN, content);
+        put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
 
         return put;
     }
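
The dropped flushCommits() calls are deliberate: in the HBase 1.x client, Table
has no flushCommits() and each put() is sent synchronously. Where write
buffering is still wanted, the 1.x replacement is BufferedMutator, as the
SimpleHBaseStore change further below shows; a minimal sketch (table name
illustrative):

    try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("KYLIN_SAMPLE"))) {
        mutator.mutate(put); // buffered client-side, flushed on flush() or close()
    }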
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
index f4dfd2b8a7..3d82105d21 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
@@ -18,7 +18,6 @@
 
 package org.apache.kylin.storage.hbase;
 
-import com.google.common.base.Preconditions;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.cube.CubeInstance;
@@ -36,6 +35,8 @@
 import org.apache.kylin.storage.hbase.steps.HBaseMROutput;
 import org.apache.kylin.storage.hbase.steps.HBaseMROutput2Transition;
 
+import com.google.common.base.Preconditions;
+
 @SuppressWarnings("unused")
 //used by reflection
 public class HBaseStorage implements IStorage {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index b1411905a5..f63d9c23b4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,12 +26,13 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.BufferedMutator;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.kv.RowConstants;
@@ -86,14 +87,13 @@ public IGTScanner scan(GTScanRequest scanRequest) throws IOException {
     }
 
     private class Writer implements IGTWriter {
-        final HTableInterface table;
+        final BufferedMutator table;
         final ByteBuffer rowkey = ByteBuffer.allocate(50);
         final ByteBuffer value = ByteBuffer.allocate(50);
 
         Writer() throws IOException {
-            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            table = conn.getTable(htableName);
-            table.setAutoFlush(false, true);
+            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            table = conn.getBufferedMutator(htableName);
         }
 
         @Override
@@ -113,24 +113,24 @@ public void write(GTRecord rec) throws IOException {
 
             Put put = new Put(rowkey);
             put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
-            table.put(put);
+            table.mutate(put);
         }
 
         @Override
         public void close() throws IOException {
-            table.flushCommits();
+            table.flush();
             table.close();
         }
     }
 
     class Reader implements IGTScanner {
-        final HTableInterface table;
+        final Table table;
         final ResultScanner scanner;
 
         int count = 0;
 
         Reader() throws IOException {
-            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
             table = conn.getTable(htableName);
 
             Scan scan = new Scan();
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
index 8ac3832d40..982a0443ed 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
@@ -25,11 +25,12 @@
 import java.util.NoSuchElementException;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
@@ -70,7 +71,7 @@
     protected final List<RowValueDecoder> rowValueDecoders;
     private final StorageContext context;
     private final String tableName;
-    private final HTableInterface table;
+    private final Table table;
 
     protected CubeTupleConverter tupleConverter;
     protected final Iterator<HBaseKeyRange> rangeIterator;
@@ -88,7 +89,7 @@
     private int advMeasureRowsRemaining;
     private int advMeasureRowIndex;
 
-    public CubeSegmentTupleIterator(CubeSegment cubeSeg, List<HBaseKeyRange> keyRanges, HConnection conn, //
+    public CubeSegmentTupleIterator(CubeSegment cubeSeg, List<HBaseKeyRange> keyRanges, Connection conn, //
             Set<TblColRef> dimensions, TupleFilter filter, Set<TblColRef> groupBy, //
             List<RowValueDecoder> rowValueDecoders, StorageContext context, TupleInfo returnTupleInfo) {
         this.cubeSeg = cubeSeg;
@@ -108,7 +109,7 @@ public CubeSegmentTupleIterator(CubeSegment cubeSeg, List<HBaseKeyRange> keyRang
         this.rangeIterator = keyRanges.iterator();
 
         try {
-            this.table = conn.getTable(tableName);
+            this.table = conn.getTable(TableName.valueOf(tableName));
         } catch (Throwable t) {
             throw new StorageException("Error when open connection to table " + tableName, t);
         }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
index ff729f43d3..19443274eb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
@@ -33,7 +33,7 @@
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.Dictionary;
@@ -46,10 +46,10 @@
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.cube.model.CubeDesc.DeriveInfo;
 import org.apache.kylin.cube.model.HBaseColumnDesc;
 import org.apache.kylin.cube.model.HBaseMappingDesc;
 import org.apache.kylin.cube.model.RowKeyDesc;
-import org.apache.kylin.cube.model.CubeDesc.DeriveInfo;
 import org.apache.kylin.dict.lookup.LookupStringTable;
 import org.apache.kylin.measure.MeasureType;
 import org.apache.kylin.metadata.filter.ColumnTupleFilter;
@@ -152,7 +152,7 @@ public ITupleIterator search(StorageContext context, SQLDigest sqlDigest, TupleI
         setCoprocessor(groupsCopD, valueDecoders, context); // enable coprocessor if beneficial
         setLimit(filter, context);
 
-        HConnection conn = HBaseConnection.get(context.getConnUrl());
+        Connection conn = HBaseConnection.get(context.getConnUrl());
 
         // notice we're passing filterD down to storage instead of flatFilter
         return new SerializedHBaseTupleIterator(conn, scans, cubeInstance, dimensionsD, filterD, groupsCopD, valueDecoders, context, returnTupleInfo);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
index 6342c5cbb6..0ade920e9c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 
 /**
  * @author yangli9
@@ -50,7 +51,7 @@ public boolean next(List<Cell> results) throws IOException {
     }
 
     @Override
-    public boolean next(List<Cell> result, int limit) throws IOException {
+    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
         return next(result);
     }
 
@@ -60,7 +61,7 @@ public boolean nextRaw(List<Cell> result) throws IOException {
     }
 
     @Override
-    public boolean nextRaw(List<Cell> result, int limit) throws IOException {
+    public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
         return next(result);
     }
 
@@ -94,4 +95,9 @@ public long getMvccReadPoint() {
         return Long.MAX_VALUE;
     }
 
+    @Override
+    public int getBatch() {
+        return -1;
+    }
+
 }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
index e8dd5b9f45..d033c773ad 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
@@ -25,7 +25,7 @@
 import java.util.NoSuchElementException;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.filter.TupleFilter;
@@ -57,7 +57,7 @@
     private int scanCount;
     private ITuple next;
 
-    public SerializedHBaseTupleIterator(HConnection conn, List<HBaseKeyRange> segmentKeyRanges, CubeInstance cube, //
+    public SerializedHBaseTupleIterator(Connection conn, List<HBaseKeyRange> segmentKeyRanges, CubeInstance cube, //
             Set<TblColRef> dimensions, TupleFilter filter, Set<TblColRef> groupBy, List<RowValueDecoder> rowValueDecoders, //
             StorageContext context, TupleInfo returnTupleInfo) {
 
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
index 7139ca7d91..7e25e4cb3f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.kylin.gridtable.StorageSideBehavior;
@@ -99,7 +99,7 @@ private RegionScanner doPostScannerObserver(final ObserverContext<RegionCoproces
         // start/end region operation & sync on scanner is suggested by the
         // javadoc of RegionScanner.nextRaw()
         // FIXME: will the lock still work when a iterator is returned? is it safe? Is readonly attribute helping here? by mhb
-        HRegion region = ctxt.getEnvironment().getRegion();
+        Region region = ctxt.getEnvironment().getRegion();
         region.startRegionOperation();
         try {
             synchronized (innerScanner) {
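
Likewise, RegionCoprocessorEnvironment.getRegion() now returns the Region interface rather than the concrete HRegion, so observers should code against the interface. The start/close region-operation idiom itself is unchanged; a sketch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.regionserver.Region;

    class RegionWorkSketch {
        void doRegionWork(ObserverContext<RegionCoprocessorEnvironment> ctxt) throws IOException {
            Region region = ctxt.getEnvironment().getRegion(); // interface, not HRegion
            region.startRegionOperation();
            try {
                // region-scoped work goes here
            } finally {
                region.closeRegionOperation();
            }
        }
    }
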
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
index a900ea1fdc..d64f48fdd3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.measure.MeasureAggregator;
 import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
@@ -116,8 +117,8 @@ public boolean next(List<Cell> results) throws IOException {
     }
 
     @Override
-    public boolean next(List<Cell> result, int limit) throws IOException {
-        return outerScanner.next(result, limit);
+    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
+        return outerScanner.next(result, scannerContext);
     }
 
     @Override
@@ -126,8 +127,8 @@ public boolean nextRaw(List<Cell> result) throws IOException {
     }
 
     @Override
-    public boolean nextRaw(List<Cell> result, int limit) throws IOException {
-        return outerScanner.nextRaw(result, limit);
+    public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
+        return outerScanner.nextRaw(result, scannerContext);
     }
 
     @Override
@@ -160,6 +161,11 @@ public long getMvccReadPoint() {
         return outerScanner.getMvccReadPoint();
     }
 
+    @Override
+    public int getBatch() {
+        return outerScanner.getBatch();
+    }
+
     private static class Stats {
         long inputRows = 0;
         long inputBytes = 0;
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
index 8404262346..331e34d967 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.kylin.measure.MeasureAggregator;
 import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
 import org.apache.kylin.storage.hbase.common.coprocessor.AggregationCache;
@@ -112,7 +113,7 @@ private void makeCells(Entry<AggrKey, MeasureAggregator[]> entry, List<Cell> res
         }
 
         @Override
-        public boolean next(List<Cell> result, int limit) throws IOException {
+        public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
             return next(result);
         }
 
@@ -122,7 +123,7 @@ public boolean nextRaw(List<Cell> result) throws IOException {
         }
 
         @Override
-        public boolean nextRaw(List<Cell> result, int limit) throws IOException {
+        public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
             return next(result);
         }
 
@@ -161,6 +162,11 @@ public long getMvccReadPoint() {
             // AggregateRegionObserver.LOG.info("Kylin Scanner getMvccReadPoint()");
             return Long.MAX_VALUE;
         }
+
+        @Override
+        public int getBatch() {
+            return innerScanner.getBatch();
+        }
     }
 
 }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
index 394b3e229a..9fd33f5e4f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
@@ -23,9 +23,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.debug.BackdoorToggles;
@@ -60,7 +60,7 @@
     static final Map<String, Boolean> CUBE_OVERRIDES = Maps.newConcurrentMap();
 
     public static ResultScanner scanWithCoprocessorIfBeneficial(CubeSegment segment, Cuboid cuboid, TupleFilter tupleFiler, //
-            Collection<TblColRef> groupBy, Collection<RowValueDecoder> rowValueDecoders, StorageContext context, HTableInterface table, Scan scan) throws IOException {
+            Collection<TblColRef> groupBy, Collection<RowValueDecoder> rowValueDecoders, StorageContext context, Table table, Scan scan) throws IOException {
 
         if (context.isCoprocessorEnabled() == false) {
             return table.getScanner(scan);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 4f538ae32c..f879e2b800 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -26,8 +26,9 @@
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.zip.DataFormatException;
 
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -50,10 +51,10 @@
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -116,7 +117,7 @@ public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOExcepti
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
 
         // globally shared connection, does not require close
-        final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
 
         final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -128,10 +129,12 @@ public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOExcepti
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
         rawScanByteString = serializeRawScans(rawScans);
         
+        int coprocessorTimeout = getCoprocessorTimeoutMillis();
+        scanRequest.setTimeout(coprocessorTimeout);
         scanRequest.clearScanRanges();//since raw scans are sent to coprocessor, we don't need to duplicate sending it
-        final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum);
-        scanRequest.setTimeout(epResultItr.getRpcTimeout());
         scanRequestByteString = serializeGTScanReq(scanRequest);
+
+        final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum, coprocessorTimeout);
         
         logger.info("Serialized scanRequestBytes {} bytes, rawScanBytesString {} bytes", scanRequestByteString.size(), rawScanByteString.size());
 
@@ -169,7 +172,7 @@ public void run() {
                     final boolean[] abnormalFinish = new boolean[1];
 
                     try {
-                        HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
+                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
 
                         final CubeVisitRequest request = builder.build();
                         final byte[] startKey = epRange.getFirst();
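
Note the two-argument getTable call above: Connection.getTable(TableName, ExecutorService) lets the caller supply the thread pool that batch and coprocessor calls run on, which is how the endpoint RPC keeps using Kylin's dedicated coprocessor pool. A sketch (the pool size is an arbitrary assumption):

    import java.io.IOException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Table;

    class TableWithPoolSketch {
        // The executor runs the table's parallel (batch/coprocessor) operations.
        Table open(Connection conn, String htableName, ExecutorService pool) throws IOException {
            return conn.getTable(TableName.valueOf(htableName), pool);
        }

        Table openWithOwnPool(Connection conn, String htableName) throws IOException {
            return open(conn, htableName, Executors.newFixedThreadPool(8)); // size is an assumption
        }
    }
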
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
index da087c9876..05b34c7445 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
@@ -22,11 +22,14 @@
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.Pair;
@@ -45,6 +48,7 @@
 import org.apache.kylin.gridtable.GTRecord;
 import org.apache.kylin.gridtable.GTScanRange;
 import org.apache.kylin.gridtable.IGTStorage;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -274,4 +278,24 @@ protected void logScan(RawScan rawScan, String tableName) {
         logger.info(info.toString());
     }
 
+    protected int getCoprocessorTimeoutMillis() {
+        int configTimeout = cubeSeg.getConfig().getQueryCoprocessorTimeoutSeconds() * 1000;
+        if (configTimeout == 0) {
+            configTimeout = Integer.MAX_VALUE;
+        }
+
+        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+        int rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
+        // final timeout should be smaller than rpc timeout
+        int upper = (int) (rpcTimeout * 0.9);
+
+        int timeout = Math.min(upper, configTimeout);
+        if (BackdoorToggles.getQueryTimeout() != -1) {
+            timeout = Math.min(upper, BackdoorToggles.getQueryTimeout());
+        }
+
+        logger.debug("{} = {} ms, use {} ms as timeout for coprocessor", HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout, timeout);
+        return timeout;
+    }
+
 }
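
To make the new getCoprocessorTimeoutMillis() concrete: with the default hbase.rpc.timeout of 60000 ms, the upper bound is 0.9 * 60000 = 54000 ms; a configured kylin.query.coprocessor.timeout.seconds of 0 (treated as no limit via Integer.MAX_VALUE) therefore still resolves to 54000 ms, keeping the coprocessor's deadline safely inside the RPC deadline.
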
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index 3cefc5fbf7..a52af902a4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -24,11 +24,12 @@
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.ShardingHash;
@@ -154,8 +155,8 @@ private IGTScanner getGTScannerInternal(final GTScanRequest scanRequest) throws
         // primary key (also the 0th column block) is always selected
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
         // globally shared connection, does not require close
-        HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
-        final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
+        Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
 
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index c27e5fc6fa..2d574bd27d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -24,50 +24,25 @@
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.gridtable.GTScanRequest;
-import org.apache.kylin.gridtable.GTScanSelfTerminatedException;
-import org.apache.kylin.storage.hbase.HBaseConnection;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-class ExpectedSizeIterator implements Iterator<byte[]> {
-    private static final Logger logger = LoggerFactory.getLogger(ExpectedSizeIterator.class);
-
-    BlockingQueue<byte[]> queue;
+import com.google.common.base.Throwables;
 
-    int expectedSize;
-    int current = 0;
-    long rpcTimeout;
-    long timeout;
-    long timeoutTS;
-    volatile Throwable coprocException;
-
-    public ExpectedSizeIterator(int expectedSize) {
+class ExpectedSizeIterator implements Iterator<byte[]> {
+    private BlockingQueue<byte[]> queue;
+    private int expectedSize;
+    private int current = 0;
+    private int coprocessorTimeout;
+    private long deadline;
+    private volatile Throwable coprocException;
+
+    public ExpectedSizeIterator(int expectedSize, int coprocessorTimeout) {
         this.expectedSize = expectedSize;
         this.queue = new ArrayBlockingQueue<byte[]>(expectedSize);
 
-        StringBuilder sb = new StringBuilder();
-        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
-
-        this.rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-        this.timeout = this.rpcTimeout * hconf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER);
-        sb.append("rpc timeout is " + this.rpcTimeout + " and after multiply retry times becomes " + this.timeout);
-
-        this.timeout *= KylinConfig.getInstanceFromEnv().getCubeVisitTimeoutTimes();
-        sb.append(" after multiply kylin.query.cube.visit.timeout.times becomes " + this.timeout);
-
-        logger.info(sb.toString());
-
-        if (BackdoorToggles.getQueryTimeout() != -1) {
-            this.timeout = BackdoorToggles.getQueryTimeout();
-            logger.info("rpc timeout is overwritten to " + this.timeout);
-        }
-
-        this.timeoutTS = System.currentTimeMillis() + 2 * this.timeout;//longer timeout than coprocessor so that query thread will not timeout faster than coprocessor
+        this.coprocessorTimeout = coprocessorTimeout;
+        //longer timeout than coprocessor so that query thread will not timeout faster than coprocessor
+        this.deadline = System.currentTimeMillis() + coprocessorTimeout * 10;
     }
 
     @Override
@@ -84,22 +59,20 @@ public boolean hasNext() {
             current++;
             byte[] ret = null;
 
-            while (ret == null && coprocException == null && timeoutTS > System.currentTimeMillis()) {
-                ret = queue.poll(10000, TimeUnit.MILLISECONDS);
+            while (ret == null && coprocException == null && deadline > System.currentTimeMillis()) {
+                ret = queue.poll(1000, TimeUnit.MILLISECONDS);
             }
 
             if (coprocException != null) {
-                if (coprocException instanceof GTScanSelfTerminatedException)
-                    throw (GTScanSelfTerminatedException) coprocException;
-                else
-                    throw new RuntimeException("Error in coprocessor", coprocException);
+                throw Throwables.propagate(coprocException);
+            }
 
-            } else if (ret == null) {
+            if (ret == null) {
                 throw new RuntimeException("Timeout visiting cube! Check why coprocessor exception is not sent back? In coprocessor Self-termination is checked every " + //
-                        GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout(" + timeout + ") cannot support this many scans?");
-            } else {
-                return ret;
+                        GTScanRequest.terminateCheckInterval + " scanned rows, the configured timeout(" + coprocessorTimeout + ") cannot support this many scans?");
             }
+
+            return ret;
         } catch (InterruptedException e) {
             throw new RuntimeException("Error when waiting queue", e);
         }
@@ -118,10 +91,6 @@ public void append(byte[] data) {
         }
     }
 
-    public long getRpcTimeout() {
-        return this.timeout;
-    }
-
     public void notifyCoprocException(Throwable ex) {
         coprocException = ex;
     }
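
The rewritten ExpectedSizeIterator drops the retry-multiplied HBase timeout in favor of a single deadline derived from the coprocessor timeout (times 10, so the query thread never gives up before the coprocessor self-terminates). The core wait loop is a short-interval poll against an absolute deadline; in isolation, as a sketch:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.TimeUnit;

    class DeadlinePollSketch {
        // Poll in short intervals against an absolute deadline, so an error
        // reported asynchronously (e.g. coprocException) is noticed within ~1s.
        byte[] pollUntil(BlockingQueue<byte[]> queue, long deadlineMillis) throws InterruptedException {
            byte[] ret = null;
            while (ret == null && deadlineMillis > System.currentTimeMillis()) {
                ret = queue.poll(1000, TimeUnit.MILLISECONDS);
            }
            return ret; // null means the deadline passed
        }
    }
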
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 506778cc8d..f0949daf56 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -142,7 +142,7 @@ private void updateRawScanByCurrentRegion(RawScan rawScan, HRegion region, int s
         if (shardLength == 0) {
             return;
         }
-        byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
+        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
         Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
         Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
     }
@@ -176,10 +176,10 @@ public void visitCube(final RpcController controller, final CubeVisitProtos.Cube
         String debugGitTag = "";
 
         String queryId = request.hasQueryId() ? request.getQueryId() : "UnknownId";
-        try (SetThreadName ignored = new SetThreadName("Kylin Query-%s", queryId)) {
+        try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             this.serviceStartTime = System.currentTimeMillis();
 
-            region = env.getRegion();
+            region = (HRegion)env.getRegion();
             region.startRegionOperation();
 
             // if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.
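
Two details in the CubeVisitService hunks are easy to miss: the Region interface exposes its boundaries only through getRegionInfo(), hence region.getRegionInfo().getStartKey(), and env.getRegion() now returns Region, hence the explicit (HRegion) cast where the concrete class is still needed.
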
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index 2814ad6f42..feb48421b7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -26,7 +26,8 @@
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -79,7 +80,8 @@ public static void createHTable(CubeSegment cubeSegment, byte[][] splitKeys) thr
         tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin admin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
+        Admin admin = conn.getAdmin();
 
         try {
             if (User.isHBaseSecurityEnabled(conf)) {
@@ -92,7 +94,7 @@ public static void createHTable(CubeSegment cubeSegment, byte[][] splitKeys) thr
                 tableDesc.addFamily(cf);
             }
 
-            if (admin.tableExists(tableName)) {
+            if (admin.tableExists(TableName.valueOf(tableName))) {
                 // admin.disableTable(tableName);
                 // admin.deleteTable(tableName);
                 throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -101,7 +103,7 @@ public static void createHTable(CubeSegment cubeSegment, byte[][] splitKeys) thr
             DeployCoprocessorCLI.deployCoprocessor(tableDesc);
 
             admin.createTable(tableDesc, splitKeys);
-            Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);
@@ -110,8 +112,7 @@ public static void createHTable(CubeSegment cubeSegment, byte[][] splitKeys) thr
     }
 
     public static void deleteHTable(TableName tableName) throws IOException {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin admin = new HBaseAdmin(conf);
+        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
@@ -126,8 +127,7 @@ public static void deleteHTable(TableName tableName) throws IOException {
 
     /** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
     public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin admin = new HBaseAdmin(conf);
+        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
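
new HBaseAdmin(conf) no longer exists in the new client; an Admin is obtained from a Connection and, unlike the shared Connection, should be closed after use. Admin operations also take TableName instead of String/byte[] names. The disable-then-delete idiom used throughout these job steps, as a sketch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;

    class DropTableSketch {
        void dropIfExists(Connection conn, String name) throws IOException {
            TableName table = TableName.valueOf(name);
            try (Admin admin = conn.getAdmin()) { // close the Admin, never the shared Connection
                if (admin.tableExists(table)) {
                    if (admin.isTableEnabled(table)) {
                        admin.disableTable(table);
                    }
                    admin.deleteTable(table);
                }
            }
        }
    }
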
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index 21f1c007fe..0f214eb262 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -29,9 +29,10 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -100,19 +101,21 @@ private void dropHBaseTable(ExecutableContext context) throws IOException {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-            HBaseAdmin admin = null;
+            Admin admin = null;
             try {
-                admin = new HBaseAdmin(conf);
+
+                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+                admin = conn.getAdmin();
+
                 for (String table : oldTables) {
-                    if (admin.tableExists(table)) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
+                    if (admin.tableExists(TableName.valueOf(table))) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(table)) {
-                                admin.disableTable(table);
+                            if (admin.isTableEnabled(TableName.valueOf(table))) {
+                                admin.disableTable(TableName.valueOf(table));
                             }
-                            admin.deleteTable(table);
+                            admin.deleteTable(TableName.valueOf(table));
                             logger.debug("Dropped HBase table " + table);
                             output.append("Dropped HBase table " + table + " \n");
                         } else {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index d5b36dfb9f..6587d4e679 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -23,8 +23,8 @@
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
@@ -49,7 +49,7 @@
 
     private final List<KeyValueCreator> keyValueCreators;
     private final int nColumns;
-    private final HTableInterface hTable;
+    private final Table hTable;
     private final CubeDesc cubeDesc;
     private final CubeSegment cubeSegment;
     private final Object[] measureValues;
@@ -58,7 +58,7 @@
     private AbstractRowKeyEncoder rowKeyEncoder;
     private byte[] keybuf;
 
-    public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
+    public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
         this.keyValueCreators = Lists.newArrayList();
         this.cubeSegment = segment;
         this.cubeDesc = cubeSegment.getCubeDesc();
@@ -117,7 +117,6 @@ public final void flush() throws IOException {
             long t = System.currentTimeMillis();
             if (hTable != null) {
                 hTable.put(puts);
-                hTable.flushCommits();
             }
             logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
             puts.clear();
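
The dropped hTable.flushCommits() call reflects that Table does no client-side write buffering: put(List<Put>) is sent immediately. Code that wants the old buffered behavior should use a BufferedMutator instead; a sketch (names are hypothetical):

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.BufferedMutator;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Put;

    class BufferedWriteSketch {
        void write(Connection conn, String name, List<Put> puts) throws IOException {
            // BufferedMutator restores the client-side buffering that
            // HTableInterface.flushCommits() used to flush.
            try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf(name))) {
                for (Put p : puts) {
                    mutator.mutate(p); // buffered locally
                }
                mutator.flush(); // explicit analogue of flushCommits()
            }
        }
    }
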
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 5b2441c58d..2f7e164160 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,19 +69,20 @@ protected ExecuteResult doWork(ExecutableContext context) throws ExecuteExceptio
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-            HBaseAdmin admin = null;
+            Admin admin = null;
             try {
-                admin = new HBaseAdmin(conf);
+                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+                admin = conn.getAdmin();
+
                 for (String table : oldTables) {
-                    if (admin.tableExists(table)) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
+                    if (admin.tableExists(TableName.valueOf(table))) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(table)) {
-                                admin.disableTable(table);
+                            if (admin.isTableEnabled(TableName.valueOf(table))) {
+                                admin.disableTable(TableName.valueOf(table));
                             }
-                            admin.deleteTable(table);
+                            admin.deleteTable(TableName.valueOf(table));
                             logger.debug("Dropped htable: " + table);
                             output.append("HBase table " + table + " is dropped. \n");
                         } else {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index a150607113..56f867ac2d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,9 +21,11 @@
 import java.io.IOException;
 
 import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -38,8 +40,8 @@
     protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
 
     private void clean() throws IOException {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+        Admin hbaseAdmin = conn.getAdmin();
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
             String name = descriptor.getNameAsString().toLowerCase();
@@ -50,7 +52,7 @@ private void clean() throws IOException {
                 System.out.println();
 
                 descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
-                hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
+                hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
             }
         }
         hbaseAdmin.close();
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 52aa7eab2c..b78e3d70f3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -33,12 +33,13 @@
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -86,7 +87,7 @@
     private static ResourceStore srcStore;
     private static ResourceStore dstStore;
     private static FileSystem hdfsFS;
-    private static HBaseAdmin hbaseAdmin;
+    private static Admin hbaseAdmin;
 
     public static final String ACL_INFO_FAMILY = "i";
     private static final String ACL_TABLE_NAME = "_acl";
@@ -130,8 +131,8 @@ public static void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeN
 
         checkAndGetHbaseUrl();
 
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        hbaseAdmin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
+        hbaseAdmin = conn.getAdmin();
 
         hdfsFS = FileSystem.get(new Configuration());
 
@@ -229,6 +230,7 @@ private static void copyFilesInMetaStore(CubeInstance cube, String overwriteIfEx
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
+
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -322,8 +324,8 @@ private static void doOpt(Opt opt) throws IOException, InterruptedException {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            String tableName = (String) opt.params[0];
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+            TableName tableName = TableName.valueOf((String) opt.params[0]);
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -445,11 +447,11 @@ private static void doOpt(Opt opt) throws IOException, InterruptedException {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            HTableInterface srcAclHtable = null;
-            HTableInterface destAclHtable = null;
+            Table srcAclHtable = null;
+            Table destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -469,7 +471,6 @@ private static void doOpt(Opt opt) throws IOException, InterruptedException {
                         destAclHtable.put(put);
                     }
                 }
-                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -500,8 +501,8 @@ private static void undo(Opt opt) throws IOException, InterruptedException {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            String tableName = (String) opt.params[0];
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+            TableName tableName = TableName.valueOf((String) opt.params[0]);
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -535,13 +536,12 @@ private static void undo(Opt opt) throws IOException, InterruptedException {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            HTableInterface destAclHtable = null;
+            Table destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
-                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -558,7 +558,7 @@ private static void undo(Opt opt) throws IOException, InterruptedException {
         }
     }
 
-    private static void updateMeta(KylinConfig config){
+    private static void updateMeta(KylinConfig config) {
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 295750af70..efda4e40c6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeInstance;
@@ -60,7 +60,7 @@
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
-    private HBaseAdmin hbaseAdmin;
+    private Admin hbaseAdmin;
 
     private List<String> issueExistHTables;
     private List<String> inconsistentHTables;
@@ -128,9 +128,8 @@ public CubeMigrationCheckCLI(KylinConfig kylinConfig, Boolean isFix) throws IOEx
         this.dstCfg = kylinConfig;
         this.ifFix = isFix;
 
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        hbaseAdmin = new HBaseAdmin(conf);
-
+        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
+        hbaseAdmin = conn.getAdmin();
         issueExistHTables = Lists.newArrayList();
         inconsistentHTables = Lists.newArrayList();
     }
@@ -187,10 +186,10 @@ public void fixInconsistent() throws IOException {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
                 logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.disableTable(sepNameList[0]);
+                hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(sepNameList[0], desc);
-                hbaseAdmin.enableTable(sepNameList[0]);
+                hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
+                hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
             }
         } else {
             logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 8f69c18711..fb6e8ecbc6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -437,4 +437,4 @@ private static Path getCoprocessorHDFSDir(FileSystem fileSystem, KylinConfig con
 
         return result;
     }
-}
+}
\ No newline at end of file
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index b883df241e..8c2299ce39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -26,10 +26,11 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -231,9 +232,9 @@ private void copyAcl(String origCubeId, String newCubeId, String projectName) th
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        HTableInterface aclHtable = null;
+        Table aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -253,7 +254,6 @@ private void copyAcl(String origCubeId, String newCubeId, String projectName) th
                     aclHtable.put(put);
                 }
             }
-            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index 86ba22fee5..dd5f8fa2f7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public static void testGridTable(double hitRatio, double indexRatio) throws IOEx
         System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
         String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
 
-        HConnection conn = HBaseConnection.get(hbaseUrl);
+        Connection conn = HBaseConnection.get(hbaseUrl);
         createHTableIfNeeded(conn, TEST_TABLE);
         prepareData(conn);
 
@@ -91,10 +91,10 @@ public static void testGridTable(double hitRatio, double indexRatio) throws IOEx
 
     }
 
-    private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+    private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
         Stats stats = new Stats("COLUMN_SCAN");
 
-        HTableInterface table = conn.getTable(TEST_TABLE);
+        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
         try {
             stats.markStart();
 
@@ -122,20 +122,20 @@ private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>
         }
     }
 
-    private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
         fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
     }
 
-    private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
     }
 
-    private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
+    private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
     }
 
-    private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
-        HTableInterface table = conn.getTable(TEST_TABLE);
+    private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
         try {
             stats.markStart();
 
@@ -156,11 +156,11 @@ private static void fullScan(HConnection conn, boolean[] hits, Stats stats) thro
         }
     }
 
-    private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+    private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
 
         final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
 
-        HTableInterface table = conn.getTable(TEST_TABLE);
+        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
         try {
 
             stats.markStart();
@@ -204,8 +204,8 @@ private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) thro
         }
     }
 
-    private static void prepareData(HConnection conn) throws IOException {
-        HTableInterface table = conn.getTable(TEST_TABLE);
+    private static void prepareData(Connection conn) throws IOException {
+        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
 
         try {
             // check how many rows existing
@@ -258,8 +258,8 @@ private static void dot(int i, int nRows) {
         return bytes;
     }
 
-    private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
-        HBaseAdmin hbase = new HBaseAdmin(conn);
+    private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
+        Admin hbase = conn.getAdmin();
 
         try {
             boolean tableExist = false;
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 6749d6cca6..940d64ab85 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,9 +24,11 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -55,8 +57,8 @@
     private void cleanUp() {
         try {
             // get all kylin hbase tables
-            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-            HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            Admin hbaseAdmin = conn.getAdmin();
             String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
             HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
             List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -71,12 +73,12 @@ private void cleanUp() {
                 // drop tables
                 for (String htableName : allTablesNeedToBeDropped) {
                     logger.info("Deleting HBase table " + htableName);
-                    if (hbaseAdmin.tableExists(htableName)) {
-                        if (hbaseAdmin.isTableEnabled(htableName)) {
-                            hbaseAdmin.disableTable(htableName);
+                    if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
+                        if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
+                            hbaseAdmin.disableTable(TableName.valueOf(htableName));
                         }
 
-                        hbaseAdmin.deleteTable(htableName);
+                        hbaseAdmin.deleteTable(TableName.valueOf(htableName));
                         logger.info("Deleted HBase table " + htableName);
                     } else {
                         logger.info("HBase table" + htableName + " does not exist");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 937b65fe24..1daca0a16c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,6 +23,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
@@ -31,12 +32,15 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
@@ -58,30 +62,31 @@
     /**
      * Computes size of each region for table and given column families.
      * */
-    public HBaseRegionSizeCalculator(HTable table) throws IOException {
-        this(table, new HBaseAdmin(table.getConfiguration()));
-    }
-
-    /** Constructor for unit testing */
-    HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
+    public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
 
+        Table table = null;
+        Admin admin = null;
         try {
+            table = hbaseConnection.getTable(TableName.valueOf(tableName));
+            admin = hbaseConnection.getAdmin();
+
             if (!enabled(table.getConfiguration())) {
                 logger.info("Region size calculation disabled.");
                 return;
             }
 
-            logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
+            logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
 
             // Get regions for table.
-            Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
+            RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
+            List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
             Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
 
-            for (HRegionInfo regionInfo : tableRegionInfos) {
-                tableRegions.add(regionInfo.getRegionName());
+            for (HRegionLocation hRegionLocation : regionLocationList) {
+                tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
             }
 
-            ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
+            ClusterStatus clusterStatus = admin.getClusterStatus();
             Collection<ServerName> servers = clusterStatus.getServers();
             final long megaByte = 1024L * 1024L;
 
@@ -105,7 +110,7 @@ public HBaseRegionSizeCalculator(HTable table) throws IOException {
                 }
             }
         } finally {
-            IOUtils.closeQuietly(hBaseAdmin);
+            IOUtils.closeQuietly(admin);
+            IOUtils.closeQuietly(table); // the Table opened above must be released too
         }
 
     }
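
HTable.getRegionLocations() is gone from the public 1.x API, so the size calculator now walks a RegionLocator obtained from the Connection. In isolation the lookup reduces to roughly this (table name illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.RegionLocator;

    final class RegionNamesSketch {
        static void printRegionNames(Connection conn) throws IOException {
            // RegionLocator is Closeable and replaces HTable.getRegionLocations().
            try (RegionLocator locator = conn.getRegionLocator(TableName.valueOf("KYLIN_XYZ"))) {
                for (HRegionLocation loc : locator.getAllRegionLocations()) {
                    // These region names are what get matched against the RegionLoad
                    // entries from Admin.getClusterStatus() above.
                    System.out.println(loc.getRegionInfo().getRegionNameAsString());
                }
            }
        }
    }
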
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index 266f7e7448..a2f60d4424 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,9 +23,10 @@
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 
@@ -42,8 +43,8 @@ private static void show() throws IOException {
         Map<String, List<String>> envs = Maps.newHashMap();
 
         // get all kylin hbase tables
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+        Admin hbaseAdmin = conn.getAdmin();
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         for (HTableDescriptor desc : tableDescriptors) {
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index e26c8e8888..da13fa475a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -32,15 +32,15 @@
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -58,11 +58,11 @@
     private static final byte[] QN = "C".getBytes();
 
     public static void createTable(String tableName) throws IOException {
-        HConnection conn = getConnection();
-        HBaseAdmin hadmin = new HBaseAdmin(conn);
+        Connection conn = getConnection();
+        Admin hadmin = conn.getAdmin();
 
         try {
-            boolean tableExist = hadmin.tableExists(tableName);
+            boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
             if (tableExist) {
                 logger.info("HTable '" + tableName + "' already exists");
                 return;
@@ -119,8 +119,8 @@ public void run() {
                 e.printStackTrace();
             }
 
-            HConnection conn = getConnection();
-            HTableInterface table = conn.getTable(tableName);
+            Connection conn = getConnection();
+            Table table = conn.getTable(TableName.valueOf(tableName));
 
             byte[] key = new byte[8 + 4];//time + id
 
@@ -135,7 +135,7 @@ public void run() {
                 Bytes.putInt(key, 8, i);
                 Put put = new Put(key);
                 byte[] cell = randomBytes(CELL_SIZE);
-                put.add(CF, QN, cell);
+                put.addColumn(CF, QN, cell);
                 buffer.add(put);
             }
             table.put(buffer);
@@ -170,8 +170,8 @@ public void run() {
             }
 
             Random r = new Random();
-            HConnection conn = getConnection();
-            HTableInterface table = conn.getTable(tableName);
+            Connection conn = getConnection();
+            Table table = conn.getTable(TableName.valueOf(tableName));
 
             long leftBound = getFirstKeyTime(table);
             long rightBound = System.currentTimeMillis();
@@ -206,7 +206,7 @@ public void run() {
         }
     }
 
-    private static long getFirstKeyTime(HTableInterface table) throws IOException {
+    private static long getFirstKeyTime(Table table) throws IOException {
         long startTime = 0;
 
         Scan scan = new Scan();
@@ -224,8 +224,8 @@ private static long getFirstKeyTime(HTableInterface table) throws IOException {
 
     }
 
-    private static HConnection getConnection() throws IOException {
-        return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
+    private static Connection getConnection() throws IOException {
+        return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
     }
 
     private static String formatTime(long time) {
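
Two independent changes land in HbaseStreamingInput: Put.add(family, qualifier, value) becomes Put.addColumn(...), and the privately created HConnection is replaced by the shared connection from HBaseConnection.get(), which appears to be cached and is therefore not closed by the caller. The write path, sketched on its own (family and qualifier constants are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;

    final class WriteSketch {
        private static final byte[] CF = "f".getBytes(); // illustrative family
        private static final byte[] QN = "C".getBytes();

        static void writeOne(Connection conn, String tableName, byte[] key, byte[] cell) throws IOException {
            try (Table table = conn.getTable(TableName.valueOf(tableName))) {
                Put put = new Put(key);
                put.addColumn(CF, QN, cell); // addColumn() replaces the removed Put.add()
                table.put(put);
            }
        }
    }
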
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ca1a060dd1..ea05ab2363 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,10 +23,11 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -50,8 +51,8 @@
     String metadataValue;
 
     private void alter() throws IOException {
-        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+        Admin hbaseAdmin = conn.getAdmin();
         HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
         hbaseAdmin.disableTable(table.getTableName());
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index 8ff5b0fb9b..df4e912eb2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,10 +30,14 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,9 +56,9 @@
     Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-
+        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
         // get all kylin hbase tables
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+        Admin hbaseAdmin = conn.getAdmin();
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -73,12 +77,13 @@ private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
             // drop tables
             for (String htableName : allTablesNeedToBeDropped) {
                 logger.info("Deleting HBase table " + htableName);
-                if (hbaseAdmin.tableExists(htableName)) {
-                    if (hbaseAdmin.isTableEnabled(htableName)) {
-                        hbaseAdmin.disableTable(htableName);
+                TableName tableName = TableName.valueOf(htableName);
+                if (hbaseAdmin.tableExists(tableName)) {
+                    if (hbaseAdmin.isTableEnabled(tableName)) {
+                        hbaseAdmin.disableTable(tableName);
                     }
 
-                    hbaseAdmin.deleteTable(htableName);
+                    hbaseAdmin.deleteTable(tableName);
                     logger.info("Deleted HBase table " + htableName);
                 } else {
                     logger.info("HBase table" + htableName + " does not exist");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index 81b79cb88e..8410d4822f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -22,12 +22,13 @@
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -58,12 +59,12 @@ public static void main(String[] args) throws IOException {
         Scan scan = new Scan();
         int limit = 20;
 
-        HConnection conn = null;
-        HTableInterface table = null;
+        Connection conn = null;
+        Table table = null;
         ResultScanner scanner = null;
         try {
-            conn = HConnectionManager.createConnection(hconf);
-            table = conn.getTable(hbaseTable);
+            conn = ConnectionFactory.createConnection(hconf);
+            table = conn.getTable(TableName.valueOf(hbaseTable));
             scanner = table.getScanner(scan);
             int count = 0;
             for (Result r : scanner) {
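
PingHBaseCLI builds a throwaway connection with ConnectionFactory.createConnection(hconf) and still tears it down through IOUtils in a finally block. Since Connection, Table and ResultScanner all implement Closeable in 1.x, a one-shot CLI like this could equally use try-with-resources; a sketch (class name and argument handling are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanPing {
        public static void main(String[] args) throws Exception {
            Configuration hconf = HBaseConfiguration.create();
            try (Connection conn = ConnectionFactory.createConnection(hconf);
                 Table table = conn.getTable(TableName.valueOf(args[0]));
                 ResultScanner scanner = table.getScanner(new Scan())) {
                int count = 0;
                for (Result r : scanner) {
                    System.out.println(Bytes.toStringBinary(r.getRow()));
                    if (++count >= 20) break; // same 20-row cap as the CLI
                }
            }
        }
    }
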
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index 01edb1fc34..db516bb8cd 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,11 +22,12 @@
 import java.util.Iterator;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -70,8 +71,8 @@ public static void main(String[] args) throws IOException {
 
         logger.info("My Scan " + scan.toString());
 
-        HConnection conn = HConnectionManager.createConnection(conf);
-        HTableInterface tableInterface = conn.getTable(htableName);
+        Connection conn = ConnectionFactory.createConnection(conf);
+        Table tableInterface = conn.getTable(TableName.valueOf(htableName));
 
         Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
         int counter = 0;
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 2c2f11cd68..85f5ad8e53 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,7 +40,9 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -55,6 +57,7 @@
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.manager.ExecutableManager;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -74,7 +77,8 @@
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         // get all kylin hbase tables
-        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+        Admin hbaseAdmin = conn.getAdmin();
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -150,22 +154,22 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
     }
 
     class DeleteHTableRunnable implements Callable {
-        HBaseAdmin hbaseAdmin;
+        Admin hbaseAdmin;
         String htableName;
 
-        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
+        DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
             this.hbaseAdmin = hbaseAdmin;
             this.htableName = htableName;
         }
 
         public Object call() throws Exception {
             logger.info("Deleting HBase table " + htableName);
-            if (hbaseAdmin.tableExists(htableName)) {
-                if (hbaseAdmin.isTableEnabled(htableName)) {
-                    hbaseAdmin.disableTable(htableName);
+            if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
+                if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
+                    hbaseAdmin.disableTable(TableName.valueOf(htableName));
                 }
 
-                hbaseAdmin.deleteTable(htableName);
+                hbaseAdmin.deleteTable(TableName.valueOf(htableName));
                 logger.info("Deleted HBase table " + htableName);
             } else {
                 logger.info("HBase table" + htableName + " does not exist");
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index e36f662927..42a54c8caf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,16 +24,18 @@
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -49,14 +51,15 @@
     private List<String> errorMsgs = Lists.newArrayList();
 
     private List<String> htables;
-    private HBaseAdmin hbaseAdmin;
+    private Admin hbaseAdmin;
     private KylinConfig kylinConfig;
     private String oldHostValue;
 
     public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
         this.htables = htables;
         this.oldHostValue = oldHostValue;
-        this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
+        Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
+        hbaseAdmin = conn.getAdmin();
         this.kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
@@ -166,9 +169,9 @@ private void updateHtable(String tableName) throws IOException {
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
         if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
             desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
-            hbaseAdmin.disableTable(tableName);
-            hbaseAdmin.modifyTable(tableName, desc);
-            hbaseAdmin.enableTable(tableName);
+            hbaseAdmin.disableTable(TableName.valueOf(tableName));
+            hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+            hbaseAdmin.enableTable(TableName.valueOf(tableName));
 
             updatedResources.add(tableName);
         }
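
The 1.x Admin API keeps the old constraint that a descriptor change requires the table offline, hence the disable / modifyTable / enable cycle. The hunk converts tableName three times; hoisted once, the cycle is (helper and tag key are hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;

    final class RetagSketch {
        static void retag(Admin admin, String tableName, String key, String value) throws IOException {
            TableName tn = TableName.valueOf(tableName);
            HTableDescriptor desc = admin.getTableDescriptor(tn);
            desc.setValue(key, value);
            admin.disableTable(tn);      // descriptor edits require the table disabled
            admin.modifyTable(tn, desc);
            admin.enableTable(tn);
        }
    }
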
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index d016e4feed..291072fd7f 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -173,8 +173,8 @@ protected CompareTupleFilter buildCompareCaseFilter(List<TblColRef> groups, Stri
         return compareFilter;
     }
 
-    protected CompareTupleFilter buildCompareDynamicFilter(List<TblColRef> groups) {
-        CompareTupleFilter compareFilter = new CompareTupleFilter(FilterOperatorEnum.EQ);
+    protected CompareTupleFilter buildCompareDynamicFilter(List<TblColRef> groups, FilterOperatorEnum operator) {
+        CompareTupleFilter compareFilter = new CompareTupleFilter(operator);
         compareFilter.addChild(new ColumnTupleFilter(groups.get(0)));
         compareFilter.addChild(new DynamicTupleFilter("?0"));
         compareFilter.bindVariable("?0", "abc");
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
index 844dbeb4fd..e494e2ef73 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterSerializeTest.java
@@ -22,7 +22,6 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
 import org.apache.kylin.metadata.filter.LogicalTupleFilter;
 import org.apache.kylin.metadata.filter.TupleFilter;
 import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
@@ -38,11 +37,7 @@
  */
 public class FilterSerializeTest extends FilterBaseTest {
 
-    @Test
-    public void testSerialize01() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildEQCompareFilter(groups, 0);
-
+    private void assertFilterSerDe(TupleFilter filter) {
         byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
         TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
 
@@ -50,51 +45,33 @@ public void testSerialize01() {
     }
 
     @Test
-    public void testSerialize02() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildEQCompareFilter(groups, 1);
-
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
+    public void testSerialize01() {
+        assertFilterSerDe(buildEQCompareFilter(buildGroups(), 0));
+    }
 
-        compareFilter(filter, newFilter);
+    @Test
+    public void testSerialize02() {
+        assertFilterSerDe(buildEQCompareFilter(buildGroups(), 1));
     }
 
     @Test
     public void testSerialize03() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildAndFilter(groups);
-
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildAndFilter(buildGroups()));
     }
 
     @Test
     public void testSerialize04() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildOrFilter(groups);
-
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildOrFilter(buildGroups()));
     }
 
     @Test
     public void testSerialize05() {
         ColumnDesc column = new ColumnDesc();
-
         TblColRef colRef = column.getRef();
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildEQCompareFilter(groups, 0));
     }
 
     @Test
@@ -104,12 +81,8 @@ public void testSerialize06() {
         TblColRef colRef = column.getRef();
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildEQCompareFilter(groups, 0));
     }
 
     @Test
@@ -123,12 +96,8 @@ public void testSerialize07() {
         TblColRef colRef = column.getRef();
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildEQCompareFilter(groups, 0));
     }
 
     @Test
@@ -141,12 +110,8 @@ public void testSerialize08() {
         TblColRef colRef = column.getRef();
         List<TblColRef> groups = new ArrayList<TblColRef>();
         groups.add(colRef);
-        TupleFilter filter = buildEQCompareFilter(groups, 0);
 
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildEQCompareFilter(groups, 0));
     }
 
     @Test
@@ -159,10 +124,7 @@ public void testSerialize10() {
         logicFilter.addChild(orFilter);
         logicFilter.addChild(andFilter);
 
-        byte[] bytes = TupleFilterSerializer.serialize(logicFilter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(logicFilter, newFilter);
+        assertFilterSerDe(logicFilter);
     }
 
     @Test
@@ -175,60 +137,32 @@ public void testSerialize11() {
         logicFilter.addChild(orFilter);
         logicFilter.addChild(andFilter);
 
-        byte[] bytes = TupleFilterSerializer.serialize(logicFilter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(logicFilter, newFilter);
+        assertFilterSerDe(logicFilter);
     }
 
     @Test
     public void testSerialize12() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCaseFilter(groups);
-
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildCaseFilter(buildGroups()));
     }
 
     @Test
     public void testSerialize13() {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildCompareCaseFilter(groups, "0");
-
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildCompareCaseFilter(buildGroups(), "0"));
     }
 
     @Test
     public void testSerialize14() throws ParseException {
-        List<TblColRef> groups = buildGroups();
-        TupleFilter filter = buildINCompareFilter(groups.get(0));
-
-        long start = System.currentTimeMillis();
-        byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
-        System.out.println("Size of serialized filter " + bytes.length + ", serialize time: " + (System.currentTimeMillis() - start));
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(filter, newFilter);
+        assertFilterSerDe(buildINCompareFilter(buildGroups().get(0)));
     }
 
     @Test
     public void testDynamic() {
-        final CompareTupleFilter compareDynamicFilter = buildCompareDynamicFilter(buildGroups());
-
-        byte[] bytes = TupleFilterSerializer.serialize(compareDynamicFilter, CS);
-        TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);
-
-        bytes = TupleFilterSerializer.serialize(newFilter, CS);
-        TupleFilter newFilter2 = TupleFilterSerializer.deserialize(bytes, CS);
-
-        compareFilter(compareDynamicFilter, newFilter);
-        compareFilter(compareDynamicFilter, newFilter2);
-
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.EQ));
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.NEQ));
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.GT));
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.LT));
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.GTE));
+        assertFilterSerDe(buildCompareDynamicFilter(buildGroups(), FilterOperatorEnum.LTE));
     }
 
 }
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
index c25b690983..4695353688 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.gridtable.StorageSideBehavior;
 import org.apache.kylin.metadata.datatype.LongMutable;
@@ -229,15 +230,8 @@ public boolean next(List<Cell> results) throws IOException {
             return nextRaw(results);
         }
 
-        /*
-         * (non-Javadoc)
-         * 
-         * @see
-         * org.apache.hadoop.hbase.regionserver.InternalScanner#next(java.util
-         * .List, int)
-         */
         @Override
-        public boolean next(List<Cell> result, int limit) throws IOException {
+        public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
             return next(result);
         }
 
@@ -306,6 +300,11 @@ public long getMvccReadPoint() {
             return 0;
         }
 
+        @Override
+        public int getBatch() {
+            return 0;
+        }
+
         /*
          * (non-Javadoc)
          * 
@@ -322,16 +321,9 @@ public boolean nextRaw(List<Cell> result) throws IOException {
             return i < input.size();
         }
 
-        /*
-         * (non-Javadoc)
-         * 
-         * @see
-         * org.apache.hadoop.hbase.regionserver.RegionScanner#nextRaw(java.util
-         * .List, int)
-         */
         @Override
-        public boolean nextRaw(List<Cell> result, int limit) throws IOException {
-            return nextRaw(result);
+        public boolean nextRaw(List<Cell> list, ScannerContext scannerContext) throws IOException {
+            return nextRaw(list);
         }
 
     }
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
index 1d85922a22..04e2e8b5af 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
@@ -136,7 +137,7 @@ public void testEndToEnd() throws Exception {
 
                         Put p = new Put(rk);
                         p.setDurability(Durability.SKIP_WAL);
-                        p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+                        p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
                         ht.put(p);
                     }
                 }
@@ -224,7 +225,7 @@ private void runScanner(HTable hTable, int expectedSize, Filter filter) throws I
         scan.addFamily(cf.getBytes());
         scan.setFilter(filter);
         List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(table.getBytes());
-        HRegion first = regions.get(0);
+        Region first = regions.get(0);
         first.getScanner(scan);
         RegionScanner scanner = first.getScanner(scan);
         List<Cell> results = new ArrayList<Cell>();
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/MockupMapContext.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/MockupMapContext.java
index d5c3f60255..5adf3277a0 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/MockupMapContext.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/MockupMapContext.java
@@ -99,11 +99,6 @@ public String getStatus() {
                 throw new NotImplementedException();
             }
 
-            @Override
-            public float getProgress() {
-                throw new NotImplementedException();
-            }
-
             @Override
             public Counter getCounter(Enum<?> counterName) {
                 throw new NotImplementedException();
@@ -164,6 +159,11 @@ public String getJobName() {
                 throw new NotImplementedException();
             }
 
+            @Override
+            public boolean userClassesTakesPrecedence() {
+                return false;
+            }
+
             @Override
             public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {
                 throw new NotImplementedException();
@@ -214,10 +214,6 @@ public boolean getJobSetupCleanupNeeded() {
                 throw new NotImplementedException();
             }
 
-            @Override
-            public boolean getTaskCleanupNeeded() {
-                throw new NotImplementedException();
-            }
 
             @Override
             public boolean getProfileEnabled() {
@@ -229,11 +225,6 @@ public String getProfileParams() {
                 throw new NotImplementedException();
             }
 
-            @Override
-            public IntegerRanges getProfileTaskRange(boolean isMap) {
-                throw new NotImplementedException();
-            }
-
             @Override
             public String getUser() {
                 throw new NotImplementedException();
diff --git a/tomcat-ext/pom.xml b/tomcat-ext/pom.xml
index 722488f676..f04becfc7d 100644
--- a/tomcat-ext/pom.xml
+++ b/tomcat-ext/pom.xml
@@ -17,15 +17,13 @@
  limitations under the License.
 -->
 
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
-         xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <packaging>jar</packaging>
     <parent>
         <groupId>org.apache.kylin</groupId>
         <artifactId>kylin</artifactId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
     </parent>
 
     <artifactId>kylin-tomcat-ext</artifactId>
diff --git a/tool/pom.xml b/tool/pom.xml
index e530469289..97c7e6fdf8 100644
--- a/tool/pom.xml
+++ b/tool/pom.xml
@@ -17,12 +17,11 @@
  limitations under the License.
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>kylin</artifactId>
         <groupId>org.apache.kylin</groupId>
-        <version>1.6.0-SNAPSHOT</version>
+        <version>1.6.0</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -44,6 +43,18 @@
         </dependency>
 
         <!--Env-->
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-yarn-api</artifactId>
+            <version>${yarn.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-yarn-common</artifactId>
+            <version>${yarn.version}</version>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
index c8ff1f40ae..638d97b40b 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
@@ -87,7 +87,7 @@ public static void main(String[] args) {
 
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
-        String jobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
+        String kylinJobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
         boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
         boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
         boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
@@ -95,14 +95,14 @@ protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throw
 
         // dump job output
         logger.info("Start to dump job output");
-        ExecutablePO executablePO = executableDao.getJob(jobId);
-        addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + jobId);
-        addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + jobId);
-        for (ExecutablePO task : executablePO.getTasks()) {
-            addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + task.getUuid());
-            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + task.getUuid());
+        ExecutablePO executablePO = executableDao.getJob(kylinJobId);
+        addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + kylinJobId);
+        addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + kylinJobId);
+        for (ExecutablePO kylinTask : executablePO.getTasks()) {
+            addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + kylinTask.getUuid());
+            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + kylinTask.getUuid());
             if (includeYarnLogs) {
-                yarnLogsResources.add(task.getUuid());
+                yarnLogsResources.add(kylinTask.getUuid());
             }
         }
         extractResources(exportDir);
@@ -121,14 +121,14 @@ protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throw
             }
         }
 
-        // dump yarn logs
+        // dump mr job info
         if (includeYarnLogs) {
-            logger.info("Start to dump yarn job logs: " + jobId);
-            File yarnLogDir = new File(exportDir, "yarn");
-            FileUtils.forceMkdir(yarnLogDir);
+            logger.info("Start to dump mr job info: " + kylinJobId);
+            File yarnDir = new File(exportDir, "yarn");
+            FileUtils.forceMkdir(yarnDir);
             for (String stepId : yarnLogsResources) {
-                extractTaskCounter(stepId, new File(new File(yarnLogDir, stepId), "Counters"));
-                extractYarnLog(stepId, new File(yarnLogDir, stepId), true);
+                extractJobInfo(stepId, new File(yarnDir, stepId));
+                extractJobLog(stepId, new File(yarnDir, stepId), true);
             }
         }
 
@@ -171,7 +171,7 @@ private void extractResources(File destDir) {
         }
     }
 
-    private void extractYarnLog(String taskId, File destDir, boolean onlyFail) throws Exception {
+    private void extractJobLog(String taskId, File destDir, boolean onlyFail) throws Exception {
         final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
         FileUtils.forceMkdir(destDir);
         if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
@@ -189,12 +189,13 @@ private void extractYarnLog(String taskId, File destDir, boolean onlyFail) throw
         }
     }
 
-    private void extractTaskCounter(String taskId, File destDir) throws Exception {
+    private void extractJobInfo(String taskId, File destDir) throws Exception {
         final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
         if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
             String jobId = jobInfo.get(ExecutableConstants.MR_JOB_ID);
             FileUtils.forceMkdir(destDir);
-            new JobTaskCounterExtractor(jobId).executeExtract(destDir);
+            String[] mrJobArgs = { "-destDir", destDir.getAbsolutePath(), "-compress", "false", "-submodule", "true" };
+            new MrJobInfoExtractor(jobId).execute(mrJobArgs);
         }
     }
 
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobTaskCounterExtractor.java b/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
similarity index 69%
rename from tool/src/main/java/org/apache/kylin/tool/JobTaskCounterExtractor.java
rename to tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
index 6a317e9f59..96f47d3965 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobTaskCounterExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/MrJobInfoExtractor.java
@@ -24,6 +24,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpMethod;
 import org.apache.commons.httpclient.methods.GetMethod;
@@ -42,16 +44,21 @@
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
-public class JobTaskCounterExtractor extends AbstractInfoExtractor {
+public class MrJobInfoExtractor extends AbstractInfoExtractor {
     private String mrJobId;
-    private String yarnUrl;
-    private static final Logger logger = LoggerFactory.getLogger(JobTaskCounterExtractor.class);
+    private String jobUrlPrefix;
+
+    private static final Logger logger = LoggerFactory.getLogger(MrJobInfoExtractor.class);
+
+    @SuppressWarnings("static-access")
+    private static final Option OPTION_INCLUDE_COUNTERS = OptionBuilder.withArgName("includeCounters").hasArg().isRequired(false).withDescription("Specify whether to include mr task counters to extract. Default false.").create("includeCounters");
 
     private final int HTTP_RETRY = 3;
 
-    public JobTaskCounterExtractor(String mrJobId) {
+    public MrJobInfoExtractor(String mrJobId) {
         this.mrJobId = mrJobId;
-        this.yarnUrl = getRestCheckUrl();
+        String historyServerUrl = getRestCheckUrl();
+        this.jobUrlPrefix = historyServerUrl + "/ws/v1/history/mapreduce/jobs/" + mrJobId;
     }
 
     private String getRestCheckUrl() {
@@ -60,12 +67,12 @@ private String getRestCheckUrl() {
         Pattern pattern = Pattern.compile("(http://)(.*):.*");
         if (yarnStatusCheckUrl != null) {
             Matcher m = pattern.matcher(yarnStatusCheckUrl);
-            m.matches();
-            yarnUrl = m.group(1) + m.group(2) + ":19888";
-            return yarnUrl;
-        } else {
-            logger.info("kylin.job.yarn.app.rest.check.status.url" + " is not set read from hadoop configuration");
+            if (m.matches()) {
+                return m.group(1) + m.group(2) + ":19888";
+            }
         }
+        logger.info("kylin.job.yarn.app.rest.check.status.url" + " is not set read from hadoop configuration");
+
         Configuration conf = HadoopUtil.getCurrentConfiguration();
         String rmWebHost = HAUtil.getConfValueForRMInstance(YarnConfiguration.RM_WEBAPP_ADDRESS, YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS, conf);
         if (HAUtil.isHAEnabled(conf)) {
@@ -107,10 +114,44 @@ private String getHttpResponse(String url) {
         return response;
     }
 
-    protected void executeExtract(File exportDir) {
+    private void extractTaskCounter(String taskId, File exportDir, String taskUrl) throws IOException {
+        try {
+            String response = getHttpResponse(taskUrl + taskId + "/counters");
+            FileUtils.writeStringToFile(new File(exportDir, taskId + ".json"), response, Charset.defaultCharset());
+        } catch (Exception e) {
+            logger.warn("Failed to get task counters rest response" + e);
+        }
+    }
+
+    private void extractJobConf(File exportDir) throws IOException {
+        try {
+            String confUrl = jobUrlPrefix + "/conf/";
+            String response = getHttpResponse(confUrl);
+            FileUtils.writeStringToFile(new File(exportDir, "job_conf.json"), response, Charset.defaultCharset());
+        } catch (Exception e) {
+            logger.warn("Failed to get job conf rest response.", e);
+        }
+    }
+
+    @Override
+    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
+        try {
+            boolean includeTaskCounter = optionsHelper.hasOption(OPTION_INCLUDE_COUNTERS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_COUNTERS)) : false;
+            if (includeTaskCounter) {
+                extractTaskCounters(exportDir);
+            }
+            extractJobConf(exportDir);
+        } catch (Exception e) {
+            logger.warn("Failed to get mr tasks rest response.", e);
+        }
+    }
+
+    private void extractTaskCounters(File exportDir) {
         try {
-            String taskUrl = yarnUrl + "/ws/v1/history/mapreduce/jobs/" + mrJobId + "/tasks/";
-            String tasksResponse = getHttpResponse(taskUrl);
+            String tasksUrl = jobUrlPrefix + "/tasks/";
+            String tasksResponse = getHttpResponse(tasksUrl);
             JsonNode tasks = new ObjectMapper().readTree(tasksResponse).path("tasks").path("task");
 
             String maxReduceId = null;
@@ -132,24 +173,12 @@ protected void executeExtract(File exportDir) {
                     }
                 }
             }
-            extractTaskCounterFile(maxMapId, exportDir, taskUrl);
-            extractTaskCounterFile(maxReduceId, exportDir, taskUrl);
+            File counterDir = new File(exportDir, "counters");
+            FileUtils.forceMkdir(counterDir);
+            extractTaskCounter(maxMapId, counterDir, tasksUrl);
+            extractTaskCounter(maxReduceId, counterDir, tasksUrl);
         } catch (Exception e) {
             logger.warn("Failed to get mr tasks rest response" + e);
         }
     }
-
-    private void extractTaskCounterFile(String taskId, File exportDir, String taskUrl) throws IOException {
-        try {
-            String response = getHttpResponse(taskUrl + taskId + "/counters");
-            FileUtils.writeStringToFile(new File(exportDir, taskId + ".json"), response, Charset.defaultCharset());
-        } catch (Exception e) {
-            logger.warn("Failed to get task counters rest response" + e);
-        }
-    }
-
-    @Override
-    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
-        executeExtract(exportDir);
-    }
 }
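
The renamed MrJobInfoExtractor gathers everything over the MapReduce history server REST API (default HTTP port 19888): /ws/v1/history/mapreduce/jobs/<id> for the job itself, plus the /conf, /tasks and /tasks/<taskId>/counters sub-resources it writes to disk. A plain-Java sketch of those GETs, using HttpURLConnection in place of the commons-httpclient calls in the patch (host and job id are made up):

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.Scanner;

    public class HistoryServerProbe {
        static String httpGet(String url) throws IOException {
            HttpURLConnection c = (HttpURLConnection) new URL(url).openConnection();
            try (InputStream in = c.getInputStream();
                 Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A")) {
                return s.hasNext() ? s.next() : "";
            } finally {
                c.disconnect();
            }
        }

        public static void main(String[] args) throws IOException {
            String prefix = "http://history-host:19888/ws/v1/history/mapreduce/jobs/job_1500000000000_0001";
            System.out.println(httpGet(prefix + "/conf"));  // what extractJobConf() saves
            System.out.println(httpGet(prefix + "/tasks")); // task list; ids feed .../tasks/<id>/counters
        }
    }
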
diff --git a/webapp/app/index.html b/webapp/app/index.html
index 8fb2cb919d..ad881d5c8e 100644
--- a/webapp/app/index.html
+++ b/webapp/app/index.html
@@ -163,7 +163,7 @@
 <!--New GUI-->
 <script src="js/model/modelsManager.js"></script>
 <script src="js/services/badQuery.js"></script>
-
+<script src="js/utils/utils.js"></script>
 <script src="js/controllers/page.js"></script>
 <script src="js/controllers/index.js"></script>
 <script src="js/controllers/access.js"></script>
diff --git a/webapp/app/js/controllers/cubeAdvanceSetting.js b/webapp/app/js/controllers/cubeAdvanceSetting.js
index be90f655f4..0ba321c168 100644
--- a/webapp/app/js/controllers/cubeAdvanceSetting.js
+++ b/webapp/app/js/controllers/cubeAdvanceSetting.js
@@ -27,18 +27,24 @@ KylinApp.controller('CubeAdvanceSettingCtrl', function ($scope, $modal,cubeConfi
   angular.forEach($scope.cubeMetaFrame.rowkey.rowkey_columns,function(item){
     //var _isDictionaries = item.encoding === "dict"?"true":"false";
     var _isFixedLength = item.encoding.substring(0,12) === "fixed_length"?"true":"false";//fixed_length:12
-    var _isIntLength = item.encoding.substring(0,3) === "int"?"true":"false";//fixed_length:12
+    var _isIntegerLength = item.encoding.substring(0,7) === "integer"?"true":"false";
+    var _isIntLength = item.encoding.substring(0,3) === "int"?"true":"false";
     var _encoding = item.encoding;
     var _valueLength ;
     if(_isFixedLength !=="false"){
       _valueLength = item.encoding.substring(13,item.encoding.length);
       _encoding = "fixed_length";
     }
-    if(_isIntLength!="false"){
+    if(_isIntLength!="false" && _isIntegerLength=="false" ){
       _valueLength = item.encoding.substring(4,item.encoding.length);
       _encoding = "int";
     }
 
+    if(_isIntegerLength!="false" ){
+      _valueLength = item.encoding.substring(8,item.encoding.length);
+      _encoding = "integer";
+    }
+
     var rowkeyObj = {
       column:item.column,
       encoding:_encoding,
@@ -66,6 +72,8 @@ KylinApp.controller('CubeAdvanceSettingCtrl', function ($scope, $modal,cubeConfi
       }
       else if(item.encoding=="int" && item.valueLength){
         encoding = "int:"+item.valueLength;
+      }else if(item.encoding=="integer" && item.valueLength){
+        encoding = "integer:"+item.valueLength;
       }else{
         encoding = item.encoding;
       }
diff --git a/webapp/app/js/controllers/cubeDimensions.js b/webapp/app/js/controllers/cubeDimensions.js
index 5525fe4d3e..7cb850bf2c 100644
--- a/webapp/app/js/controllers/cubeDimensions.js
+++ b/webapp/app/js/controllers/cubeDimensions.js
@@ -74,11 +74,12 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
             cols[i].isLookup = false;
 
             // Default not selected and not disabled.
-            factSelectAvailable[cols[i].name] = {name:cols[i].name ,selected: false, disabled: false};
+            factSelectAvailable[cols[i].name] = {name:cols[i].name ,selected: false};
 
         }
 
         $scope.availableColumns[factTable] = cols;
+        factSelectAvailable.all=false;
         $scope.selectedColumns[factTable] = factSelectAvailable;
         $scope.availableTables.push(factTable);
 
@@ -96,10 +97,11 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
                 cols2[k].isLookup = true;
 
                 // Default not selected and not disabled.
-                lookupSelectAvailable[cols2[k].name] = {name:cols2[k].table+"_derived",selected: false, disabled: false};
+                lookupSelectAvailable[cols2[k].name] = {name:cols2[k].name,selected: false};
             }
 
             $scope.availableColumns[lookups[j].table] = cols2;
+            lookupSelectAvailable.all=false;
             $scope.selectedColumns[lookups[j].table] = lookupSelectAvailable;
             if($scope.availableTables.indexOf(lookups[j].table)==-1){
                 $scope.availableTables.push(lookups[j].table);
@@ -111,14 +113,22 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
     $scope.initColumnStatus = function () {
         angular.forEach($scope.cubeMetaFrame.dimensions, function (dim) {
             var cols = dimCols(dim);
-
             angular.forEach(cols, function (colName) {
               if(dim.derived){
-                 $scope.selectedColumns[dim.table][colName] = {name:dim.name, selected: true, disabled: true,normal:"false"};
+                 $scope.selectedColumns[dim.table][colName] = {name:dim.name, selected: true, normal:"false"};
               }else{
-                 $scope.selectedColumns[dim.table][colName] = {name:dim.name, selected: true, disabled: true,normal:"true"};
+                 $scope.selectedColumns[dim.table][colName] = {name:dim.name, selected: true, normal:"true"};
               }
-           });
+            });
+        });
+        angular.forEach($scope.selectedColumns,function(value,table){
+              var all=true;
+              angular.forEach(value,function(col){
+                   if(col.selected==false&&typeof col=="object"){
+                        all=false;
+                   }
+             });
+             $scope.selectedColumns[table].all=all;
         });
     };
 
@@ -300,9 +310,9 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
         var cols = dimCols(dim);
         angular.forEach(cols, function (colName) {
             if(dim.table==$scope.metaModel.model.fact_table){
-               $scope.selectedColumns[dim.table][colName] = {name:colName,selected: false, disabled: false};
+               $scope.selectedColumns[dim.table][colName] = {name:colName,selected: false};
             }else{
-               $scope.selectedColumns[dim.table][colName] = {name:dim.table+"_derived",selected: false, disabled: false};
+               $scope.selectedColumns[dim.table][colName] = {name:colName,selected: false};
             }
         });
     };
@@ -395,25 +405,77 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
     };
 
     $scope.autoChange= function(table,name){
-        if($scope.selectedColumns[table][name].selected==false){
-             $scope.selectedColumns[table][name].normal=null;
-             if(table==$scope.metaModel.model.fact_table){
-                 $scope.selectedColumns[table][name].name=name;
-             }else{
-                 $scope.selectedColumns[table][name].name=table+"_derived";
-             }
-        }else{
-             if($scope.metaModel.model.fact_table!=table){
-                 $scope.selectedColumns[table][name].normal="false";
-             }
-        }
+         if($scope.metaModel.model.fact_table==table){
+               if($scope.selectedColumns[table][name].selected==false){
+                    $scope.selectedColumns[table].all=false;
+               }else{
+                    var all=true;
+                    angular.forEach($scope.selectedColumns[table],function(col){
+                          if(col.selected==false&&typeof col=="object"){
+                                 all=false;
+                          }
+                    });
+                    $scope.selectedColumns[table].all=all;
+               }
+         }
+         else{
+              if($scope.selectedColumns[table][name].selected==false){
+                   $scope.selectedColumns[table].all=false;
+                   $scope.selectedColumns[table][name].normal=null;
+                   $scope.selectedColumns[table][name].name=name;
+              }else{
+                   var all=true;
+                   angular.forEach($scope.selectedColumns[table],function(col){
+                       if(col.selected==false&&typeof col=="object"){
+                       all=false;
+                       }
+                   });
+                   $scope.selectedColumns[table].all=all;
+                   if($scope.metaModel.model.fact_table!=table){
+                       $scope.selectedColumns[table][name].normal="false";
+                   }
+              }
+         }
+    }
+    $scope.autoChangeAll= function(table){
+         if($scope.metaModel.model.fact_table==table){
+              if($scope.selectedColumns[table].all==true){
+                   angular.forEach($scope.selectedColumns[table],function(col){
+                        if(typeof col==="object"){
+                           col.selected=true;
+                        }
+                   })
+              }else{
+                   angular.forEach($scope.selectedColumns[table],function(col){
+                        if(typeof col==="object"){
+                           col.selected=false;
+                        }
+                   })
+              }
+         }else{
+              if($scope.selectedColumns[table].all==true){
+                   angular.forEach($scope.selectedColumns[table],function(col){
+                        if(col.selected==false&&typeof col==="object"){
+                           col.selected=true;
+                           $scope.autoChange(table,col.name);
+                        }
 
+                   })
+              }else{
+                    angular.forEach($scope.selectedColumns[table],function(col){
+                        if(typeof col==="object"){
+                          col.selected=false;
+                          $scope.autoChange(table,col.name);
+                        }
+                    })
+              }
+         }
     }
     $scope.checkAutoDimension=function(){
         var nameNull=false;
         angular.forEach($scope.selectedColumns, function (value, table) {
              angular.forEach(value, function (status, colName) {
-                  if (status.selected) {
+                  if (status.selected&&typeof status=="object") {
                       if(status.name==""){
                            SweetAlert.swal('', "The name is requested.", 'warning');
                            nameNull=true;
@@ -442,8 +504,10 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
         var selectedCols = $scope.getSelectedCols();
         angular.forEach($scope.selectedColumns, function (value, table) {
             angular.forEach(value, function (status, colName) {
-                 status.selected=false;
-                 status.normal=null;
+                if(typeof status=="object"){
+                    status.selected=false;
+                    status.normal=null;
+                }
             });
         });
     };
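[Editor's note] cubeDimensions.js gains a per-table "select all" checkbox. The chosen design stores the boolean flag as an `all` property on the same map that holds the per-column state objects, which is why every loop over `$scope.selectedColumns[table]` now filters with `typeof col == "object"` to skip the flag. A plain-JS sketch of the recomputation, assuming that same mixed-map layout:

    // Recompute the "all" flag for one table's column map, where the map
    // mixes {selected: ...} objects with a boolean "all" property.
    function recomputeAll(columnMap) {
      var all = true;
      for (var key in columnMap) {
        var col = columnMap[key];
        // the boolean "all" entry fails the typeof check and is skipped,
        // mirroring the guards in the patch above
        if (typeof col === 'object' && col.selected === false) {
          all = false;
        }
      }
      columnMap.all = all;
    }

Keeping the flag in a separate structure (e.g. a per-table selectAll map) would avoid the typeof guards, but the patch keeps it inline so the existing bindings on selectedColumns stay unchanged.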
diff --git a/webapp/app/js/controllers/cubeEdit.js b/webapp/app/js/controllers/cubeEdit.js
index 5f467998fe..acd4d3299e 100755
--- a/webapp/app/js/controllers/cubeEdit.js
+++ b/webapp/app/js/controllers/cubeEdit.js
@@ -137,18 +137,44 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     return me_columns;
   };
 
-  $scope.getGroupByColumns = function () {
-    var groupBy_columns=[];
+  $scope.getAllModelDimMeasureColumns = function () {
+    var me_columns = [];
+    if($scope.metaModel.model.metrics){
+      angular.forEach($scope.metaModel.model.metrics,function(metric,index){
+        me_columns.push(metric);
+      })
+    }
 
     angular.forEach($scope.metaModel.model.dimensions,function(dimension,index){
       if(dimension.columns){
-        groupBy_columns = groupBy_columns.concat(dimension.columns);
+        me_columns = me_columns.concat(dimension.columns);
       }
     })
 
-    return groupBy_columns;
+    return distinct_array(me_columns);
   };
 
+  $scope.getAllModelDimColumns = function () {
+    var me_columns = [];
+    angular.forEach($scope.metaModel.model.dimensions,function(dimension,index){
+      if(dimension.columns){
+        me_columns = me_columns.concat(dimension.columns);
+      }
+    })
+
+    return distinct_array(me_columns);
+  };
+
+  function distinct_array(arrays){
+    var arr = [];
+    for(var item in arrays){
+      if(arr.indexOf(arrays[item])==-1){
+        arr.push(arrays[item]);
+      }
+    }
+    return arr;
+  }
+
 
   $scope.getExtendedHostColumn = function(){
     var me_columns = [];
@@ -180,8 +206,9 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
       }
 
     });
-    angular.forEach($scope.cubeMetaFrame.measure,function(measure){
-         if(measure.function.parameter.type==column){
+
+    angular.forEach($scope.cubeMetaFrame.measures,function(measure){
+         if(measure.function.parameter.type == "column"){
            me_columns.push(measure.function.parameter.value);
          }
     });
@@ -266,11 +293,11 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     });
 
     var queryParam = {
-      cube_name: $routeParams.cubeName
+      cubeId: $routeParams.cubeName
     };
-    CubeService.list(queryParam, {},function(instance){
-      if (instance.length > 0) {
-        $scope.instance = instance[0];
+    CubeService.getCube(queryParam, {},function(instance){
+      if (instance) {
+        $scope.instance = instance;
         $scope.state.cubeInstance =angular.toJson($scope.instance,true);
 
       } else {
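[Editor's note] cubeEdit.js now deduplicates the model's dimension and measure columns before offering them in dropdowns. The new distinct_array helper walks the array with for...in and indexOf, which is O(n^2) but fine at cube-designer scale. An equivalent O(n) sketch using a seen-map, still ES5-safe for the browsers this UI targets (an alternative, not what the patch ships):

    // Alternative O(n) dedupe for string column names (ES5, no Set needed).
    // The patch's distinct_array() does the same with indexOf.
    function distinctArray(names) {
      var seen = {};
      var out = [];
      for (var i = 0; i < names.length; i++) {
        if (!seen.hasOwnProperty(names[i])) {
          seen[names[i]] = true;
          out.push(names[i]);
        }
      }
      return out;
    }

    distinctArray(['PRICE', 'SELLER_ID', 'PRICE']); // ['PRICE', 'SELLER_ID']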
diff --git a/webapp/app/js/controllers/cubeMeasures.js b/webapp/app/js/controllers/cubeMeasures.js
index 006c6c2313..938e918f2b 100644
--- a/webapp/app/js/controllers/cubeMeasures.js
+++ b/webapp/app/js/controllers/cubeMeasures.js
@@ -54,17 +54,24 @@ KylinApp.controller('CubeMeasuresCtrl', function ($scope, $modal,MetaModel,cubes
         var _name=configuration.slice(14);
         var item=$scope.newMeasure.function.configuration[configuration];
         var _isFixedLength = item.substring(0,12) === "fixed_length"?"true":"false";//fixed_length:12
-        var _isIntLength = item.substring(0,3) === "int"?"true":"false";//fixed_length:12
+        var _isIntegerLength = item.substring(0,7) === "integer"?"true":"false";
+        var _isIntLength = item.substring(0,3) === "int"?"true":"false";
         var _encoding = item;
         var _valueLength = 0 ;
         if(_isFixedLength !=="false"){
           _valueLength = item.substring(13,item.length);
           _encoding = "fixed_length";
         }
-        if(_isIntLength!="false"){
+        if(_isIntLength!="false" && _isIntegerLength=="false" ){
           _valueLength = item.substring(4,item.length);
           _encoding = "int";
         }
+
+        if(_isIntegerLength!="false" ){
+          _valueLength = item.substring(8,item.length);
+          _encoding = "integer";
+        }
+
         $scope.GroupBy = {
           name:_name,
           encoding:_encoding,
@@ -172,8 +179,9 @@ KylinApp.controller('CubeMeasuresCtrl', function ($scope, $modal,MetaModel,cubes
           if(item.encoding!=="dict" && item.encoding!=="date"&& item.encoding!=="time"){
             if(item.encoding=="fixed_length" && item.valueLength){
               encoding = "fixed_length:"+item.valueLength;
-            }
-            else if(item.encoding=="int" && item.valueLength){
+            }else if(item.encoding=="integer" && item.valueLength){
+              encoding = "integer:"+item.valueLength;
+            }else if(item.encoding=="int" && item.valueLength){
               encoding = "int:"+item.valueLength;
             }else{
               encoding = item.encoding;
@@ -221,7 +229,7 @@ KylinApp.controller('CubeMeasuresCtrl', function ($scope, $modal,MetaModel,cubes
     $scope.nextPara = {
       "type":"column",
       "value":"",
-      "next_parameter":{}
+      "next_parameter": null
     }
     if($scope.newMeasure){
       $scope.newMeasure.function.parameter.next_parameter = null;
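[Editor's note] cubeMeasures.js switches the terminator of a measure's parameter list from `{}` to `null`. Kylin measure functions take their arguments as a singly linked chain of `{type, value, next_parameter}` objects (multi-argument measures such as TOP_N append to the chain), so an empty object at the tail would serialize as a bogus extra parameter, whereas `null` ends the chain cleanly. A sketch of building such a chain from an array of column names (buildParameterChain is illustrative, not part of the patch):

    // Illustrative only: fold column names into the linked
    // {type, value, next_parameter} parameter structure.
    function buildParameterChain(columns) {
      var head = null;
      for (var i = columns.length - 1; i >= 0; i--) {
        head = { type: 'column', value: columns[i], next_parameter: head };
      }
      return head;
    }

    buildParameterChain(['SELLER_ID', 'PRICE']);
    // {type:'column', value:'SELLER_ID',
    //  next_parameter:{type:'column', value:'PRICE', next_parameter:null}}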
diff --git a/webapp/app/js/controllers/cubes.js b/webapp/app/js/controllers/cubes.js
index 499e93c562..f735fb8126 100644
--- a/webapp/app/js/controllers/cubes.js
+++ b/webapp/app/js/controllers/cubes.js
@@ -334,12 +334,42 @@ KylinApp.controller('CubesCtrl', function ($scope, $q, $routeParams, $location,
       $scope.loadDetail(cube);
       // for streaming cube build tip
       if(cube.streaming){
-        $modal.open({
-          templateUrl: 'streamingBuild.html',
-          controller: streamingBuildCtrl,
-          resolve: {
+
+        SweetAlert.swal({
+          title: '',
+          text: "Are you sure you want to start the build?",
+          type: '',
+          showCancelButton: true,
+          confirmButtonColor: '#DD6B55',
+          confirmButtonText: "Yes",
+          closeOnConfirm: true
+        }, function(isConfirm) {
+          if(isConfirm){
+            loadingRequest.show();
+            CubeService.rebuildStreamingCube(
+              {
+                cubeId: cube.name
+              },
+              {
+                sourceOffsetStart:0,
+                sourceOffsetEnd:'9223372036854775807',
+                buildType:'BUILD'
+              }, function (job) {
+                loadingRequest.hide();
+                SweetAlert.swal('Success!', 'Rebuild job was submitted successfully', 'success');
+              },function(e){
+
+                loadingRequest.hide();
+                if(e.data&& e.data.exception){
+                  var message =e.data.exception;
+                  var msg = !!(message) ? message : 'Failed to take action.';
+                  SweetAlert.swal('Oops...', msg, 'error');
+                }else{
+                  SweetAlert.swal('Oops...', "Failed to take action.", 'error');
+                }
+            });
           }
-        });
+        })
         return;
       }
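[Editor's note] cubes.js replaces the streaming-build modal with a SweetAlert confirm that submits a build over the full Kafka offset range. Note that sourceOffsetEnd is passed as the string '9223372036854775807' (Long.MAX_VALUE): JavaScript numbers only keep 53 bits of integer precision, so the same value written as a number literal would silently round before reaching the REST API. A quick demonstration:

    // Why the end offset travels as a string: 2^63-1 does not survive
    // the round-trip through an IEEE-754 double.
    Number.MAX_SAFE_INTEGER;       // 9007199254740991 (2^53 - 1)
    Number('9223372036854775807'); // 9223372036854775808 -- rounded!
    JSON.stringify({ sourceOffsetEnd: '9223372036854775807' });
    // '{"sourceOffsetEnd":"9223372036854775807"}' -- exact; the server
    // parses the long from the string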
 
diff --git a/webapp/app/js/controllers/query.js b/webapp/app/js/controllers/query.js
index dede294742..6be915b9d2 100644
--- a/webapp/app/js/controllers/query.js
+++ b/webapp/app/js/controllers/query.js
@@ -19,7 +19,7 @@
 'use strict';
 
 KylinApp
-    .controller('QueryCtrl', function ($scope, storage, $base64, $q, $location, $anchorScroll, $routeParams, QueryService, $modal, MessageService, $domUtilityService, $timeout, TableService,SweetAlert) {
+    .controller('QueryCtrl', function ($scope, storage, $base64, $q, $location, $anchorScroll, $routeParams, QueryService, $modal, MessageService, $domUtilityService, $timeout, TableService, SweetAlert, VdmUtil) {
         $scope.mainPanel = 'query';
         $scope.rowsPerPage = 50000;
         $scope.base64 = $base64;
@@ -207,7 +207,14 @@ KylinApp
                 } else {
                     oneQuery.result.data = data;
                 }
-
+                angular.forEach(oneQuery.result.data,function(row,index){
+                    angular.forEach(row,function(column,value){
+                        var float =VdmUtil.SCToFloat(column);
+                        if (float!=""){
+                            oneQuery.result.data[index][value]=parseFloat(float);
+                        }
+                    });
+                });
                 $scope.curQuery.result.isResponsePartial = result.partial;
             }
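[Editor's note] query.js now post-processes every result cell through VdmUtil.SCToFloat (from the js/utils/utils.js script added to index.html earlier in this diff) so values the server returns in scientific notation render as plain decimals. The utility itself is not shown here; a plausible sketch of the contract the call site relies on (returns '' for non-scientific input, a parseable decimal string otherwise; the real utils.js implementation may differ):

    // Sketch of the contract query.js relies on; not the actual
    // webapp/app/js/utils/utils.js source.
    function SCToFloat(value) {
      var sc = /^[+-]?\d+(\.\d+)?[eE][+-]?\d+$/;
      if (typeof value === 'string' && sc.test(value)) {
        return String(parseFloat(value)); // "1.2E-5" -> "0.000012"
      }
      return ''; // caller skips the cell when '' comes back
    }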
 
diff --git a/webapp/app/js/directives/select.js b/webapp/app/js/directives/select.js
index c8cf6a26f3..7327af9d13 100644
--- a/webapp/app/js/directives/select.js
+++ b/webapp/app/js/directives/select.js
@@ -1,14 +1,13 @@
 /*!
  * ui-select
  * http://github.com/angular-ui/ui-select
- * Version: 0.13.2 - 2015-10-09T15:34:24.040Z
+ * Version: 0.19.5 - 2016-10-24T23:13:59.434Z
  * License: MIT
  */
 
 
 (function () {
 "use strict";
-
 var KEY = {
     TAB: 9,
     ENTER: 13,
@@ -42,7 +41,7 @@ var KEY = {
             return true;
         }
 
-        if (e.metaKey) return true;
+        if (e.metaKey || e.ctrlKey || e.altKey) return true;
 
         return false;
     },
@@ -55,6 +54,13 @@ var KEY = {
     },
     isHorizontalMovement: function (k){
       return ~[KEY.LEFT,KEY.RIGHT,KEY.BACKSPACE,KEY.DELETE].indexOf(k);
+    },
+    toSeparator: function (k) {
+      var sep = {ENTER:"\n",TAB:"\t",SPACE:" "}[k];
+      if (sep) return sep;
+      // return undefined for special keys other than enter, tab or space.
+      // no way to use them to cut strings.
+      return KEY[k] ? undefined : k;
     }
   };
 
@@ -103,11 +109,16 @@ var uis = angular.module('ui.select', [])
   placeholder: '', // Empty by default, like HTML tag <select>
   refreshDelay: 1000, // In milliseconds
   closeOnSelect: true,
+  skipFocusser: false,
   dropdownPosition: 'auto',
+  removeSelected: true,
+  resetSearchInput: true,
   generateId: function() {
     return latestId++;
   },
-  appendToBody: false
+  appendToBody: false,
+  spinnerEnabled: false,
+  spinnerClass: 'glyphicon-refresh ui-select-spin'
 })
 
 // See Rename minErr and make it accessible from outside https://github.com/angular/angular.js/issues/6913
@@ -139,11 +150,11 @@ var uis = angular.module('ui.select', [])
  */
 .filter('highlight', function() {
   function escapeRegexp(queryToEscape) {
-    return queryToEscape.replace(/([.?*+^$[\]\\(){}|-])/g, '\\$1');
+    return ('' + queryToEscape).replace(/([.?*+^$[\]\\(){}|-])/g, '\\$1');
   }
 
   return function(matchItem, query) {
-    return query && matchItem ? matchItem.replace(new RegExp(escapeRegexp(query), 'gi'), '<span class="ui-select-highlight">$&</span>') : matchItem;
+    return query && matchItem ? ('' + matchItem).replace(new RegExp(escapeRegexp(query), 'gi'), '<span class="ui-select-highlight">$&</span>') : matchItem;
   };
 })
 
@@ -169,8 +180,8 @@ var uis = angular.module('ui.select', [])
 }]);
 
 uis.directive('uiSelectChoices',
-  ['uiSelectConfig', 'uisRepeatParser', 'uiSelectMinErr', '$compile',
-  function(uiSelectConfig, RepeatParser, uiSelectMinErr, $compile) {
+  ['uiSelectConfig', 'uisRepeatParser', 'uiSelectMinErr', '$compile', '$window',
+  function(uiSelectConfig, RepeatParser, uiSelectMinErr, $compile, $window) {
 
   return {
     restrict: 'EA',
@@ -178,6 +189,9 @@ uis.directive('uiSelectChoices',
     replace: true,
     transclude: true,
     templateUrl: function(tElement) {
+      // Needed so the uiSelect can detect the transcluded content
+      tElement.addClass('ui-select-choices');
+
       // Gets theme attribute from parent (ui-select)
       var theme = tElement.parent().attr('theme') || uiSelectConfig.theme;
       return theme + '/choices.tpl.html';
@@ -187,44 +201,59 @@ uis.directive('uiSelectChoices',
 
       if (!tAttrs.repeat) throw uiSelectMinErr('repeat', "Expected 'repeat' expression.");
 
-      return function link(scope, element, attrs, $select, transcludeFn) {
+      // var repeat = RepeatParser.parse(attrs.repeat);
+      var groupByExp = tAttrs.groupBy;
+      var groupFilterExp = tAttrs.groupFilter;
 
-        // var repeat = RepeatParser.parse(attrs.repeat);
-        var groupByExp = attrs.groupBy;
-        var groupFilterExp = attrs.groupFilter;
+      if (groupByExp) {
+        var groups = tElement.querySelectorAll('.ui-select-choices-group');
+        if (groups.length !== 1) throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-group but got '{0}'.", groups.length);
+        groups.attr('ng-repeat', RepeatParser.getGroupNgRepeatExpression());
+      }
 
-        $select.parseRepeatAttr(attrs.repeat, groupByExp, groupFilterExp); //Result ready at $select.parserResult
+      var parserResult = RepeatParser.parse(tAttrs.repeat);
 
-        $select.disableChoiceExpression = attrs.uiDisableChoice;
-        $select.onHighlightCallback = attrs.onHighlight;
+      var choices = tElement.querySelectorAll('.ui-select-choices-row');
+      if (choices.length !== 1) {
+        throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-row but got '{0}'.", choices.length);
+      }
 
-        $select.dropdownPosition = attrs.position ? attrs.position.toLowerCase() : uiSelectConfig.dropdownPosition;
+      choices.attr('ng-repeat', parserResult.repeatExpression(groupByExp))
+             .attr('ng-if', '$select.open'); //Prevent unnecessary watches when dropdown is closed
 
-        if(groupByExp) {
-          var groups = element.querySelectorAll('.ui-select-choices-group');
-          if (groups.length !== 1) throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-group but got '{0}'.", groups.length);
-          groups.attr('ng-repeat', RepeatParser.getGroupNgRepeatExpression());
-        }
 
-        var choices = element.querySelectorAll('.ui-select-choices-row');
-        if (choices.length !== 1) {
-          throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-row but got '{0}'.", choices.length);
-        }
+      var rowsInner = tElement.querySelectorAll('.ui-select-choices-row-inner');
+      if (rowsInner.length !== 1) {
+        throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-row-inner but got '{0}'.", rowsInner.length);
+      }
+      rowsInner.attr('uis-transclude-append', ''); //Adding uisTranscludeAppend directive to row element after choices element has ngRepeat
 
-        choices.attr('ng-repeat', $select.parserResult.repeatExpression(groupByExp))
-            .attr('ng-if', '$select.open') //Prevent unnecessary watches when dropdown is closed
-            .attr('ng-click', '$select.select(' + $select.parserResult.itemName + ',false,$event)');
+      // If IE8 then need to target rowsInner to apply the ng-click attr as choices will not capture the event.
+      var clickTarget = $window.document.addEventListener ? choices : rowsInner;
+      clickTarget.attr('ng-click', '$select.select(' + parserResult.itemName + ',$select.skipFocusser,$event)');
 
-        var rowsInner = element.querySelectorAll('.ui-select-choices-row-inner');
-        if (rowsInner.length !== 1) throw uiSelectMinErr('rows', "Expected 1 .ui-select-choices-row-inner but got '{0}'.", rowsInner.length);
-        rowsInner.attr('uis-transclude-append', ''); //Adding uisTranscludeAppend directive to row element after choices element has ngRepeat
+      return function link(scope, element, attrs, $select) {
 
-        $compile(element, transcludeFn)(scope); //Passing current transcludeFn to be able to append elements correctly from uisTranscludeAppend
+
+        $select.parseRepeatAttr(attrs.repeat, groupByExp, groupFilterExp); //Result ready at $select.parserResult
+
+        $select.disableChoiceExpression = attrs.uiDisableChoice;
+        $select.onHighlightCallback = attrs.onHighlight;
+
+        $select.dropdownPosition = attrs.position ? attrs.position.toLowerCase() : uiSelectConfig.dropdownPosition;
+
+        scope.$on('$destroy', function() {
+          choices.remove();
+        });
 
         scope.$watch('$select.search', function(newValue) {
           if(newValue && !$select.open && $select.multiple) $select.activate(false, true);
           $select.activeIndex = $select.tagging.isActivated ? -1 : 0;
-          $select.refresh(attrs.refresh);
+          if (!attrs.minimumInputLength || $select.search.length >= attrs.minimumInputLength) {
+            $select.refresh(attrs.refresh);
+          } else {
+            $select.items = [];
+          }
         });
 
         attrs.$observe('refreshDelay', function() {
@@ -232,6 +261,14 @@ uis.directive('uiSelectChoices',
           var refreshDelay = scope.$eval(attrs.refreshDelay);
           $select.refreshDelay = refreshDelay !== undefined ? refreshDelay : uiSelectConfig.refreshDelay;
         });
+
+        scope.$watch('$select.open', function(open) {
+          if (open) {
+            tElement.attr('role', 'listbox');
+          } else {
+            tElement.removeAttr('role');
+          }
+        });
       };
     }
   };
@@ -244,8 +281,8 @@ uis.directive('uiSelectChoices',
  * put as much logic in the controller (instead of the link functions) as possible so it can be easily tested.
  */
 uis.controller('uiSelectCtrl',
-  ['$scope', '$element', '$timeout', '$filter', 'uisRepeatParser', 'uiSelectMinErr', 'uiSelectConfig', '$parse',
-  function($scope, $element, $timeout, $filter, RepeatParser, uiSelectMinErr, uiSelectConfig, $parse) {
+  ['$scope', '$element', '$timeout', '$filter', '$$uisDebounce', 'uisRepeatParser', 'uiSelectMinErr', 'uiSelectConfig', '$parse', '$injector', '$window',
+  function($scope, $element, $timeout, $filter, $$uisDebounce, RepeatParser, uiSelectMinErr, uiSelectConfig, $parse, $injector, $window) {
 
   var ctrl = this;
 
@@ -255,9 +292,15 @@ uis.controller('uiSelectCtrl',
   ctrl.searchEnabled = uiSelectConfig.searchEnabled;
   ctrl.sortable = uiSelectConfig.sortable;
   ctrl.refreshDelay = uiSelectConfig.refreshDelay;
+  ctrl.paste = uiSelectConfig.paste;
+  ctrl.resetSearchInput = uiSelectConfig.resetSearchInput;
+  ctrl.refreshing = false;
+  ctrl.spinnerEnabled = uiSelectConfig.spinnerEnabled;
+  ctrl.spinnerClass = uiSelectConfig.spinnerClass;
 
-  ctrl.removeSelected = false; //If selected item(s) should be removed from dropdown list
+  ctrl.removeSelected = uiSelectConfig.removeSelected; //If selected item(s) should be removed from dropdown list
   ctrl.closeOnSelect = true; //Initialized inside uiSelect directive link function
+  ctrl.skipFocusser = false; //Set to true to avoid returning focus to ctrl when item is selected
   ctrl.search = EMPTY_SEARCH;
 
   ctrl.activeIndex = 0; //Dropdown of choices
@@ -271,7 +314,6 @@ uis.controller('uiSelectCtrl',
   ctrl.dropdownPosition = 'auto';
 
   ctrl.focusser = undefined; //Reference to input element used to handle focus events
-  ctrl.resetSearchInput = true;
   ctrl.multiple = undefined; // Initialized inside uiSelect directive link function
   ctrl.disableChoiceExpression = undefined; // Initialized inside uiSelectChoices directive link function
   ctrl.tagging = {isActivated: false, fct: undefined};
@@ -279,6 +321,17 @@ uis.controller('uiSelectCtrl',
   ctrl.lockChoiceExpression = undefined; // Initialized inside uiSelectMatch directive link function
   ctrl.clickTriggeredSelect = false;
   ctrl.$filter = $filter;
+  ctrl.$element = $element;
+
+  // Use $injector to check for $animate and store a reference to it
+  ctrl.$animate = (function () {
+    try {
+      return $injector.get('$animate');
+    } catch (err) {
+      // $animate does not exist
+      return null;
+    }
+  })();
 
   ctrl.searchInput = $element.querySelectorAll('input.ui-select-search');
   if (ctrl.searchInput.length !== 1) {
@@ -286,16 +339,36 @@ uis.controller('uiSelectCtrl',
   }
 
   ctrl.isEmpty = function() {
-    return angular.isUndefined(ctrl.selected) || ctrl.selected === null || ctrl.selected === '';
+    return angular.isUndefined(ctrl.selected) || ctrl.selected === null || ctrl.selected === '' || (ctrl.multiple && ctrl.selected.length === 0);
   };
 
+  function _findIndex(collection, predicate, thisArg){
+    if (collection.findIndex){
+      return collection.findIndex(predicate, thisArg);
+    } else {
+      var list = Object(collection);
+      var length = list.length >>> 0;
+      var value;
+
+      for (var i = 0; i < length; i++) {
+        value = list[i];
+        if (predicate.call(thisArg, value, i, list)) {
+          return i;
+        }
+      }
+      return -1;
+    }
+  }
+
   // Most of the time the user does not want to empty the search input when in typeahead mode
   function _resetSearchInput() {
-    if (ctrl.resetSearchInput || (ctrl.resetSearchInput === undefined && uiSelectConfig.resetSearchInput)) {
+    if (ctrl.resetSearchInput) {
       ctrl.search = EMPTY_SEARCH;
       //reset activeIndex
       if (ctrl.selected && ctrl.items.length && !ctrl.multiple) {
-        ctrl.activeIndex = ctrl.items.indexOf(ctrl.selected);
+        ctrl.activeIndex = _findIndex(ctrl.items, function(item){
+          return angular.equals(this, item);
+        }, ctrl.selected);
       }
     }
   }
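[Editor's note] The hunk above replaces the old `ctrl.items.indexOf(ctrl.selected)` lookup with a _findIndex helper that matches via angular.equals and degrades to a manual loop on browsers without Array.prototype.findIndex (an ES2015 addition). Deep equality matters here because the selected item may be a fresh object deserialized from the model rather than the same reference held in ctrl.items:

    // A selection deserialized from the model is a new object, so a
    // strict === lookup cannot find it among the choices:
    var items = [{ id: 1 }, { id: 2 }];
    var selected = { id: 2 };  // same data, different reference

    items.indexOf(selected);   // -1

    // _findIndex above compares structurally instead:
    // _findIndex(items, function (i) { return angular.equals(this, i); }, selected)
    // returns 1.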
@@ -329,15 +402,48 @@ uis.controller('uiSelectCtrl',
         ctrl.activeIndex = 0;
       }
 
-      // Give it time to appear before focus
-      $timeout(function() {
-        ctrl.search = initSearchValue || ctrl.search;
-        ctrl.searchInput[0].focus();
-        if(!ctrl.tagging.isActivated && ctrl.items.length > 1) {
-          _ensureHighlightVisible();
+      var container = $element.querySelectorAll('.ui-select-choices-content');
+      var searchInput = $element.querySelectorAll('.ui-select-search');
+      if (ctrl.$animate && ctrl.$animate.on && ctrl.$animate.enabled(container[0])) {
+        var animateHandler = function(elem, phase) {
+          if (phase === 'start' && ctrl.items.length === 0) {
+            // Only focus input after the animation has finished
+            ctrl.$animate.off('removeClass', searchInput[0], animateHandler);
+            $timeout(function () {
+              ctrl.focusSearchInput(initSearchValue);
+            });
+          } else if (phase === 'close') {
+            // Only focus input after the animation has finished
+            ctrl.$animate.off('enter', container[0], animateHandler);
+            $timeout(function () {
+              ctrl.focusSearchInput(initSearchValue);
+            });
+          }
+        };
+
+        if (ctrl.items.length > 0) {
+          ctrl.$animate.on('enter', container[0], animateHandler);
+        } else {
+          ctrl.$animate.on('removeClass', searchInput[0], animateHandler);
         }
-      });
+      } else {
+        $timeout(function () {
+          ctrl.focusSearchInput(initSearchValue);
+          if(!ctrl.tagging.isActivated && ctrl.items.length > 1) {
+            _ensureHighlightVisible();
+          }
+        });
+      }
     }
+    else if (ctrl.open && !ctrl.searchEnabled) {
+      // Close the selection if we don't have search enabled, and we click on the select again
+      ctrl.close();
+    }
+  };
+
+  ctrl.focusSearchInput = function (initSearchValue) {
+    ctrl.search = initSearchValue || ctrl.search;
+    ctrl.searchInput[0].focus();
   };
 
   ctrl.findGroupByName = function(name) {
@@ -412,17 +518,23 @@ uis.controller('uiSelectCtrl',
       data = data || ctrl.parserResult.source($scope);
       var selectedItems = ctrl.selected;
       //TODO should implement for single mode removeSelected
-      if (ctrl.isEmpty() || (angular.isArray(selectedItems) && !selectedItems.length) || !ctrl.removeSelected) {
+      if (ctrl.isEmpty() || (angular.isArray(selectedItems) && !selectedItems.length) || !ctrl.multiple || !ctrl.removeSelected) {
         ctrl.setItemsFn(data);
       }else{
-        if ( data !== undefined ) {
-          var filteredItems = data.filter(function(i) {return selectedItems && selectedItems.indexOf(i) < 0;});
+        if ( data !== undefined && data !== null ) {
+          var filteredItems = data.filter(function(i) {
+            return angular.isArray(selectedItems) ? selectedItems.every(function(selectedItem) {
+              return !angular.equals(i, selectedItem);
+            }) : !angular.equals(i, selectedItems);
+          });
           ctrl.setItemsFn(filteredItems);
         }
       }
       if (ctrl.dropdownPosition === 'auto' || ctrl.dropdownPosition === 'up'){
         $scope.calculateDropdownPos();
       }
+
+      $scope.$broadcast('uis:refresh');
     };
 
     // See https://github.com/angular/angular.js/blob/v1.2.15/src/ng/directive/ngRepeat.js#L259
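[Editor's note] refreshItems gets the same treatment: instead of `selectedItems.indexOf(i) < 0`, already-selected entries are removed with angular.equals so structurally identical objects from a refreshed data source are still filtered out, and the new `!ctrl.multiple` guard leaves single-select lists intact. The filter in isolation (assuming angular is loaded, as it is throughout this file):

    // Keep only choices that deep-equal none of the current selections
    // (multiple mode).
    function stripSelected(data, selected) {
      return (data || []).filter(function (choice) {
        return selected.every(function (s) {
          return !angular.equals(choice, s);
        });
      });
    }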
@@ -439,7 +551,11 @@ uis.controller('uiSelectCtrl',
           //Remove already selected items (ex: while searching)
           //TODO Should add a test
           ctrl.refreshItems(items);
-          ctrl.ngModel.$modelValue = null; //Force scope model value and ngModel value to be out of sync to re-run formatters
+
+          //update the view value with fresh data from items, if there is a valid model value
+          if(angular.isDefined(ctrl.ngModel.$modelValue)) {
+            ctrl.ngModel.$modelValue = null; //Force scope model value and ngModel value to be out of sync to re-run formatters
+          }
         }
       }
     });
@@ -455,7 +571,6 @@ uis.controller('uiSelectCtrl',
    */
   ctrl.refresh = function(refreshAttr) {
     if (refreshAttr !== undefined) {
-
       // Debounce
       // See https://github.com/angular-ui/bootstrap/blob/0.10.0/src/typeahead/typeahead.js#L155
       // FYI AngularStrap typeahead does not have debouncing: https://github.com/mgcrea/angular-strap/blob/v2.0.0-rc.4/src/typeahead/typeahead.js#L177
@@ -463,8 +578,13 @@ uis.controller('uiSelectCtrl',
         $timeout.cancel(_refreshDelayPromise);
       }
       _refreshDelayPromise = $timeout(function() {
-        $scope.$eval(refreshAttr);
-      }, ctrl.refreshDelay);
+        var refreshPromise =  $scope.$eval(refreshAttr);
+        if (refreshPromise && angular.isFunction(refreshPromise.then) && !ctrl.refreshing) {
+          ctrl.refreshing = true;
+          refreshPromise.then(function() {
+            ctrl.refreshing = false;
+          });
+      }}, ctrl.refreshDelay);
     }
   };
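[Editor's note] ctrl.refresh still debounces through $timeout, but now inspects the value of the evaluated refresh expression: if it is a promise, ctrl.refreshing is held true until it settles, which drives the spinnerEnabled/spinnerClass options added to the config block near the top of this file. Under the assumption of an $http-backed loader (the endpoint and names below are illustrative only):

    // Illustrative refresh callback; returning the promise lets ui-select
    // keep its spinner visible until the request settles.
    $scope.refreshHiveTables = function (search) {
      return $http.get('/kylin/api/tables', { params: { project: search } })
        .then(function (resp) { $scope.hiveTables = resp.data; });
    };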
 
@@ -473,9 +593,9 @@ uis.controller('uiSelectCtrl',
       return false;
     }
     var itemIndex = ctrl.items.indexOf(itemScope[ctrl.itemProperty]);
-    var isActive =  itemIndex === ctrl.activeIndex;
+    var isActive =  itemIndex == ctrl.activeIndex;
 
-    if ( !isActive || ( itemIndex < 0 && ctrl.taggingLabel !== false ) ||( itemIndex < 0 && ctrl.taggingLabel === false) ) {
+    if ( !isActive || itemIndex < 0 ) {
       return false;
     }
 
@@ -486,18 +606,49 @@ uis.controller('uiSelectCtrl',
     return isActive;
   };
 
+  var _isItemSelected = function (item) {
+    return (ctrl.selected && angular.isArray(ctrl.selected) &&
+        ctrl.selected.filter(function (selection) { return angular.equals(selection, item); }).length > 0);
+  };
+
+  var disabledItems = [];
+
+  function _updateItemDisabled(item, isDisabled) {
+    var disabledItemIndex = disabledItems.indexOf(item);
+    if (isDisabled && disabledItemIndex === -1) {
+      disabledItems.push(item);
+    }
+
+    if (!isDisabled && disabledItemIndex > -1) {
+      disabledItems.splice(disabledItemIndex, 1);
+    }
+  }
+
+  function _isItemDisabled(item) {
+    return disabledItems.indexOf(item) > -1;
+  }
+
   ctrl.isDisabled = function(itemScope) {
 
     if (!ctrl.open) return;
 
-    var itemIndex = ctrl.items.indexOf(itemScope[ctrl.itemProperty]);
+    var item = itemScope[ctrl.itemProperty];
+    var itemIndex = ctrl.items.indexOf(item);
     var isDisabled = false;
-    var item;
 
-    if (itemIndex >= 0 && !angular.isUndefined(ctrl.disableChoiceExpression)) {
-      item = ctrl.items[itemIndex];
-      isDisabled = !!(itemScope.$eval(ctrl.disableChoiceExpression)); // force the boolean value
-      item._uiSelectChoiceDisabled = isDisabled; // store this for later reference
+    if (itemIndex >= 0 && (angular.isDefined(ctrl.disableChoiceExpression) || ctrl.multiple)) {
+
+      if (item.isTag) return false;
+
+      if (ctrl.multiple) {
+        isDisabled = _isItemSelected(item);
+      }
+
+      if (!isDisabled && angular.isDefined(ctrl.disableChoiceExpression)) {
+        isDisabled = !!(itemScope.$eval(ctrl.disableChoiceExpression));
+      }
+
+      _updateItemDisabled(item, isDisabled);
     }
 
     return isDisabled;
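[Editor's note] Disabled-choice tracking moves off the items themselves: instead of stamping `_uiSelectChoiceDisabled` onto each object, which leaks UI state into user data and can upset angular.equals comparisons against clean copies, the controller keeps a private disabledItems array with small update/query helpers, and in multiple mode a selected item now counts as disabled. The same bookkeeping pattern in isolation:

    // Side-list bookkeeping: UI state lives next to the data, not on it.
    var disabledItems = [];
    function setDisabled(item, isDisabled) {
      var i = disabledItems.indexOf(item);
      if (isDisabled && i === -1) disabledItems.push(item);
      if (!isDisabled && i > -1) disabledItems.splice(i, 1);
    }
    function isDisabled(item) {
      return disabledItems.indexOf(item) > -1;
    }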
@@ -506,16 +657,23 @@ uis.controller('uiSelectCtrl',
 
   // When the user selects an item with ENTER or clicks the dropdown
   ctrl.select = function(item, skipFocusser, $event) {
-    if (item === undefined || !item._uiSelectChoiceDisabled) {
+    if (item === undefined || !_isItemDisabled(item)) {
 
-      if ( ! ctrl.items && ! ctrl.search ) return;
+      if ( ! ctrl.items && ! ctrl.search && ! ctrl.tagging.isActivated) return;
 
-      if (!item || !item._uiSelectChoiceDisabled) {
-        if(ctrl.tagging.isActivated) {
-          // if taggingLabel is disabled, we pull from ctrl.search val
+      if (!item || !_isItemDisabled(item)) {
+        // if click is made on existing item, prevent from tagging, ctrl.search does not matter
+        ctrl.clickTriggeredSelect = false;
+        if($event && ($event.type === 'click' || $event.type === 'touchend') && item)
+          ctrl.clickTriggeredSelect = true;
+
+        if(ctrl.tagging.isActivated && ctrl.clickTriggeredSelect === false) {
+          // if taggingLabel is disabled and item is undefined we pull from ctrl.search
           if ( ctrl.taggingLabel === false ) {
             if ( ctrl.activeIndex < 0 ) {
-              item = ctrl.tagging.fct !== undefined ? ctrl.tagging.fct(ctrl.search) : ctrl.search;
+              if (item === undefined) {
+                item = ctrl.tagging.fct !== undefined ? ctrl.tagging.fct(ctrl.search) : ctrl.search;
+              }
               if (!item || angular.equals( ctrl.items[0], item ) ) {
                 return;
               }
@@ -534,7 +692,7 @@ uis.controller('uiSelectCtrl',
               // create new item on the fly if we don't already have one;
               // use tagging function if we have one
               if ( ctrl.tagging.fct !== undefined && typeof item === 'string' ) {
-                item = ctrl.tagging.fct(ctrl.search);
+                item = ctrl.tagging.fct(item);
                 if (!item) return;
               // if item type is 'string', apply the tagging label
               } else if ( typeof item === 'string' ) {
@@ -544,12 +702,12 @@ uis.controller('uiSelectCtrl',
             }
           }
           // search ctrl.selected for dupes potentially caused by tagging and return early if found
-          if ( ctrl.selected && angular.isArray(ctrl.selected) && ctrl.selected.filter( function (selection) { return angular.equals(selection, item); }).length > 0 ) {
+          if (_isItemSelected(item)) {
             ctrl.close(skipFocusser);
             return;
           }
         }
-
+        _resetSearchInput();
         $scope.$broadcast('uis:select', item);
 
         var locals = {};
@@ -565,9 +723,6 @@ uis.controller('uiSelectCtrl',
         if (ctrl.closeOnSelect) {
           ctrl.close(skipFocusser);
         }
-        if ($event && $event.type === 'click') {
-          ctrl.clickTriggeredSelect = true;
-        }
       }
     }
   };
@@ -576,9 +731,8 @@ uis.controller('uiSelectCtrl',
   ctrl.close = function(skipFocusser) {
     if (!ctrl.open) return;
     if (ctrl.ngModel && ctrl.ngModel.$setTouched) ctrl.ngModel.$setTouched();
-    _resetSearchInput();
     ctrl.open = false;
-
+    _resetSearchInput();
     $scope.$broadcast('uis:close', skipFocusser);
 
   };
@@ -606,18 +760,56 @@ uis.controller('uiSelectCtrl',
     }
   };
 
-  ctrl.isLocked = function(itemScope, itemIndex) {
-      var isLocked, item = ctrl.selected[itemIndex];
+  // Set default function for locked choices - avoids unnecessary
+  // logic if functionality is not being used
+  ctrl.isLocked = function () {
+    return false;
+  };
+
+  $scope.$watch(function () {
+    return angular.isDefined(ctrl.lockChoiceExpression) && ctrl.lockChoiceExpression !== "";
+  }, _initaliseLockedChoices);
+
+  function _initaliseLockedChoices(doInitalise) {
+    if(!doInitalise) return;
+
+    var lockedItems = [];
+
+    function _updateItemLocked(item, isLocked) {
+      var lockedItemIndex = lockedItems.indexOf(item);
+      if (isLocked && lockedItemIndex === -1) {
+        lockedItems.push(item);
+      }
+
+      if (!isLocked && lockedItemIndex > -1) {
+        lockedItems.splice(lockedItemIndex, 1);
+      }
+    }
+
+    function _isItemlocked(item) {
+      return lockedItems.indexOf(item) > -1;
+    }
+
+    ctrl.isLocked = function (itemScope, itemIndex) {
+      var isLocked = false,
+          item = ctrl.selected[itemIndex];
 
-      if (item && !angular.isUndefined(ctrl.lockChoiceExpression)) {
-          isLocked = !!(itemScope.$eval(ctrl.lockChoiceExpression)); // force the boolean value
-          item._uiSelectChoiceLocked = isLocked; // store this for later reference
+      if(item) {
+        if (itemScope) {
+          isLocked = !!(itemScope.$eval(ctrl.lockChoiceExpression));
+          _updateItemLocked(item, isLocked);
+        } else {
+          isLocked = _isItemlocked(item);
+        }
       }
 
       return isLocked;
-  };
+    };
+  }
+
 
   var sizeWatch = null;
+  var updaterScheduled = false;
   ctrl.sizeSearchInput = function() {
 
     var input = ctrl.searchInput[0],
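[Editor's note] isLocked follows the same side-list pattern. The default is now a constant `return false`, and the full implementation (with its own lockedItems list) is only installed by the watcher once a lock-choice-expression is actually configured, so selects that never lock choices skip per-item evaluation entirely. A trimmed sketch of the lazy-installation pattern (the real code above also caches per-item lock state so removeChoice can query it with itemScope === null):

    // Lazy capability installation: keep a cheap default until the
    // feature is configured, then swap in the real implementation.
    ctrl.isLocked = function () { return false; };

    $scope.$watch(function () {
      return angular.isDefined(ctrl.lockChoiceExpression) &&
             ctrl.lockChoiceExpression !== '';
    }, function (enabled) {
      if (!enabled) return;
      ctrl.isLocked = function (itemScope, itemIndex) {
        return !!(itemScope && itemScope.$eval(ctrl.lockChoiceExpression));
      };
    });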
@@ -639,12 +831,18 @@ uis.controller('uiSelectCtrl',
     ctrl.searchInput.css('width', '10px');
     $timeout(function() { //Give tags time to render correctly
       if (sizeWatch === null && !updateIfVisible(calculateContainerWidth())) {
-        sizeWatch = $scope.$watch(calculateContainerWidth, function(containerWidth) {
-          if (updateIfVisible(containerWidth)) {
-            sizeWatch();
-            sizeWatch = null;
+        sizeWatch = $scope.$watch(function() {
+          if (!updaterScheduled) {
+            updaterScheduled = true;
+            $scope.$$postDigest(function() {
+              updaterScheduled = false;
+              if (updateIfVisible(calculateContainerWidth())) {
+                sizeWatch();
+                sizeWatch = null;
+              }
+            });
           }
-        });
+        }, angular.noop);
       }
     });
   };
@@ -665,7 +863,7 @@ uis.controller('uiSelectCtrl',
         break;
       case KEY.ENTER:
         if(ctrl.open && (ctrl.tagging.isActivated || ctrl.activeIndex >= 0)){
-          ctrl.select(ctrl.items[ctrl.activeIndex]); // Make sure at least one dropdown item is highlighted before adding if not in tagging mode
+          ctrl.select(ctrl.items[ctrl.activeIndex], ctrl.skipFocusser); // Make sure at least one dropdown item is highlighted before adding if not in tagging mode
         } else {
           ctrl.activate(false, true); //In case its the search input in 'multiple' mode
         }
@@ -684,17 +882,20 @@ uis.controller('uiSelectCtrl',
 
     var key = e.which;
 
-    // if(~[KEY.ESC,KEY.TAB].indexOf(key)){
-    //   //TODO: SEGURO?
-    //   ctrl.close();
-    // }
+    if (~[KEY.ENTER,KEY.ESC].indexOf(key)){
+      e.preventDefault();
+      e.stopPropagation();
+    }
 
     $scope.$apply(function() {
 
       var tagged = false;
 
       if (ctrl.items.length > 0 || ctrl.tagging.isActivated) {
-        _handleDropDownSelection(key);
+        if(!_handleDropDownSelection(key) && !ctrl.searchEnabled) {
+          e.preventDefault();
+          e.stopPropagation();
+        }
         if ( ctrl.taggingTokens.isActivated ) {
           for (var i = 0; i < ctrl.taggingTokens.tokens.length; i++) {
             if ( ctrl.taggingTokens.tokens[i] === KEY.MAP[e.keyCode] ) {
@@ -730,18 +931,45 @@ uis.controller('uiSelectCtrl',
 
   });
 
-  // If tagging try to split by tokens and add items
   ctrl.searchInput.on('paste', function (e) {
-    var data = e.originalEvent.clipboardData.getData('text/plain');
-    if (data && data.length > 0 && ctrl.taggingTokens.isActivated && ctrl.tagging.fct) {
-      var items = data.split(ctrl.taggingTokens.tokens[0]); // split by first token only
-      if (items && items.length > 0) {
+    var data;
+
+    if (window.clipboardData && window.clipboardData.getData) { // IE
+      data = window.clipboardData.getData('Text');
+    } else {
+      data = (e.originalEvent || e).clipboardData.getData('text/plain');
+    }
+
+    // Prepend the current input field text to the paste buffer.
+    data = ctrl.search + data;
+
+    if (data && data.length > 0) {
+      // If tagging try to split by tokens and add items
+      if (ctrl.taggingTokens.isActivated) {
+        var items = [];
+        for (var i = 0; i < ctrl.taggingTokens.tokens.length; i++) {  // split by first token that is contained in data
+          var separator = KEY.toSeparator(ctrl.taggingTokens.tokens[i]) || ctrl.taggingTokens.tokens[i];
+          if (data.indexOf(separator) > -1) {
+            items = data.split(separator);
+            break;  // only split by one token
+          }
+        }
+        if (items.length === 0) {
+          items = [data];
+        }
+        var oldsearch = ctrl.search;
         angular.forEach(items, function (item) {
-          var newItem = ctrl.tagging.fct(item);
+          var newItem = ctrl.tagging.fct ? ctrl.tagging.fct(item) : item;
           if (newItem) {
             ctrl.select(newItem, true);
           }
         });
+        ctrl.search = oldsearch || EMPTY_SEARCH;
+        e.preventDefault();
+        e.stopPropagation();
+      } else if (ctrl.paste) {
+        ctrl.paste(data);
+        ctrl.search = EMPTY_SEARCH;
         e.preventDefault();
         e.stopPropagation();
       }
@@ -780,10 +1008,28 @@ uis.controller('uiSelectCtrl',
     }
   }
 
+  var onResize = $$uisDebounce(function() {
+    ctrl.sizeSearchInput();
+  }, 50);
+
+  angular.element($window).bind('resize', onResize);
+
   $scope.$on('$destroy', function() {
     ctrl.searchInput.off('keyup keydown tagged blur paste');
+    angular.element($window).off('resize', onResize);
   });
 
+  $scope.$watch('$select.activeIndex', function(activeIndex) {
+    if (activeIndex)
+      $element.find('input').attr(
+        'aria-activedescendant',
+        'ui-select-choices-row-' + ctrl.generatedId + '-' + activeIndex);
+  });
+
+  $scope.$watch('$select.open', function(open) {
+    if (!open)
+      $element.find('input').removeAttr('aria-activedescendant');
+  });
 }]);
 
 uis.directive('uiSelect',
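[Editor's note] The controller now re-sizes the search input on window resize through $$uisDebounce(fn, 50) and unbinds the listener on $destroy to avoid leaking handlers across route changes. $$uisDebounce ships with this version of ui-select; a trailing-edge debounce of that shape looks like:

    // Trailing-edge debounce: fire once, 'wait' ms after the last call.
    function debounce(fn, wait) {
      var timer = null;
      return function () {
        var self = this, args = arguments;
        if (timer) clearTimeout(timer);
        timer = setTimeout(function () {
          fn.apply(self, args);
        }, wait);
      };
    }

    window.addEventListener('resize', debounce(function () {
      console.log('resized (at most once per 50ms burst)');
    }, 50));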
@@ -805,6 +1051,14 @@ uis.directive('uiSelect',
     controllerAs: '$select',
     compile: function(tElement, tAttrs) {
 
+      // Allow setting ngClass on uiSelect
+      var match = /{(.*)}\s*{(.*)}/.exec(tAttrs.ngClass);
+      if(match) {
+        var combined = '{'+ match[1] +', '+ match[2] +'}';
+        tAttrs.ngClass = combined;
+        tElement.attr('ng-class', combined);
+      }
+
       //Multiple or Single depending if multiple attribute presence
       if (angular.isDefined(tAttrs.multiple))
         tElement.append('<ui-select-multiple/>').removeAttr('multiple');
@@ -832,12 +1086,14 @@ uis.directive('uiSelect',
           }
         }();
 
+        scope.$watch('skipFocusser', function() {
+            var skipFocusser = scope.$eval(attrs.skipFocusser);
+            $select.skipFocusser = skipFocusser !== undefined ? skipFocusser : uiSelectConfig.skipFocusser;
+        });
+
         $select.onSelectCallback = $parse(attrs.onSelect);
         $select.onRemoveCallback = $parse(attrs.onRemove);
 
-        //Limit the number of selections allowed
-        $select.limit = (angular.isDefined(attrs.limit)) ? parseInt(attrs.limit, 10) : undefined;
-
         //Set reference to ngModel from uiSelectCtrl
         $select.ngModel = ngModel;
 
@@ -852,9 +1108,8 @@ uis.directive('uiSelect',
           });
         }
 
-        scope.$watch('searchEnabled', function() {
-            var searchEnabled = scope.$eval(attrs.searchEnabled);
-            $select.searchEnabled = searchEnabled !== undefined ? searchEnabled : uiSelectConfig.searchEnabled;
+        scope.$watch(function () { return scope.$eval(attrs.searchEnabled); }, function(newVal) {
+          $select.searchEnabled = newVal !== undefined ? newVal : uiSelectConfig.searchEnabled;
         });
 
         scope.$watch('sortable', function() {
@@ -862,6 +1117,16 @@ uis.directive('uiSelect',
             $select.sortable = sortable !== undefined ? sortable : uiSelectConfig.sortable;
         });
 
+        attrs.$observe('limit', function() {
+          //Limit the number of selections allowed
+          $select.limit = (angular.isDefined(attrs.limit)) ? parseInt(attrs.limit, 10) : undefined;
+        });
+
+        scope.$watch('removeSelected', function() {
+            var removeSelected = scope.$eval(attrs.removeSelected);
+            $select.removeSelected = removeSelected !== undefined ? removeSelected : uiSelectConfig.removeSelected;
+        });
+
         attrs.$observe('disabled', function() {
           // No need to use $eval() (thanks to ng-disabled) since we already get a boolean instead of a string
           $select.disabled = attrs.disabled !== undefined ? attrs.disabled : false;
@@ -873,6 +1138,10 @@ uis.directive('uiSelect',
           $select.resetSearchInput = resetSearchInput !== undefined ? resetSearchInput : true;
         });
 
+        attrs.$observe('paste', function() {
+          $select.paste = scope.$eval(attrs.paste);
+        });
+
         attrs.$observe('tagging', function() {
           if(attrs.tagging !== undefined)
           {
@@ -908,6 +1177,17 @@ uis.directive('uiSelect',
           }
         });
 
+        attrs.$observe('spinnerEnabled', function() {
+          // $eval() is needed otherwise we get a string instead of a boolean
+          var spinnerEnabled = scope.$eval(attrs.spinnerEnabled);
+          $select.spinnerEnabled = spinnerEnabled !== undefined ? spinnerEnabled : uiSelectConfig.spinnerEnabled;
+        });
+
+        attrs.$observe('spinnerClass', function() {
+          var spinnerClass = attrs.spinnerClass;
+          $select.spinnerClass = spinnerClass !== undefined ? attrs.spinnerClass : uiSelectConfig.spinnerClass;
+        });
+
         //Automatically gets focus when loaded
         if (angular.isDefined(attrs.autofocus)){
           $timeout(function(){
@@ -938,11 +1218,16 @@ uis.directive('uiSelect',
           }
 
           if (!contains && !$select.clickTriggeredSelect) {
-            //Will lose focus only with certain targets
-            var focusableControls = ['input','button','textarea'];
-            var targetController = angular.element(e.target).controller('uiSelect'); //To check if target is other ui-select
-            var skipFocusser = targetController && targetController !== $select; //To check if target is other ui-select
-            if (!skipFocusser) skipFocusser =  ~focusableControls.indexOf(e.target.tagName.toLowerCase()); //Check if target is input, button or textarea
+            var skipFocusser;
+            if (!$select.skipFocusser) {
+              //Will lose focus only with certain targets
+              var focusableControls = ['input','button','textarea','select'];
+              var targetController = angular.element(e.target).controller('uiSelect'); //To check if target is other ui-select
+              skipFocusser = targetController && targetController !== $select; //To check if target is other ui-select
+              if (!skipFocusser) skipFocusser =  ~focusableControls.indexOf(e.target.tagName.toLowerCase()); //Check if target is input, button or textarea
+            } else {
+              skipFocusser = true;
+            }
             $select.close(skipFocusser);
             scope.$digest();
           }
@@ -980,6 +1265,13 @@ uis.directive('uiSelect',
             throw uiSelectMinErr('transcluded', "Expected 1 .ui-select-choices but got '{0}'.", transcludedChoices.length);
           }
           element.querySelectorAll('.ui-select-choices').replaceWith(transcludedChoices);
+
+          var transcludedNoChoice = transcluded.querySelectorAll('.ui-select-no-choice');
+          transcludedNoChoice.removeAttr('ui-select-no-choice'); //To avoid loop in case directive as attr
+          transcludedNoChoice.removeAttr('data-ui-select-no-choice'); // Properly handle HTML5 data-attributes
+          if (transcludedNoChoice.length == 1) {
+            element.querySelectorAll('.ui-select-no-choice').replaceWith(transcludedNoChoice);
+          }
         });
 
         // Support for appending the select field to the body when its open
@@ -1041,6 +1333,9 @@ uis.directive('uiSelect',
           element[0].style.left = '';
           element[0].style.top = '';
           element[0].style.width = originalWidth;
+
+          // Set focus back on to the moved element
+          $select.setFocus();
         }
 
         // Hold on to a reference to the .ui-select-dropdown element for direction support.
@@ -1079,57 +1374,75 @@ uis.directive('uiSelect',
 
         };
 
-        scope.calculateDropdownPos = function(){
-
-          if ($select.open) {
-            dropdown = angular.element(element).querySelectorAll('.ui-select-dropdown');
-            if (dropdown.length === 0) {
-              return;
-            }
+        var calculateDropdownPosAfterAnimation = function() {
+          // Delay positioning the dropdown until all choices have been added so its height is correct.
+          $timeout(function() {
+            if ($select.dropdownPosition === 'up') {
+              //Go UP
+              setDropdownPosUp();
+            } else {
+              //AUTO
+              element.removeClass(directionUpClassName);
 
-            // Hide the dropdown so there is no flicker until $timeout is done executing.
-            dropdown[0].style.opacity = 0;
+              var offset = uisOffset(element);
+              var offsetDropdown = uisOffset(dropdown);
 
-            // Delay positioning the dropdown until all choices have been added so its height is correct.
-            $timeout(function(){
+              //https://code.google.com/p/chromium/issues/detail?id=342307#c4
+              var scrollTop = $document[0].documentElement.scrollTop || $document[0].body.scrollTop; //To make it cross browser (blink, webkit, IE, Firefox).
 
-              if ($select.dropdownPosition === 'up'){
-                  //Go UP
-                  setDropdownPosUp(offset, offsetDropdown);
+              // Determine if the direction of the dropdown needs to be changed.
+              if (offset.top + offset.height + offsetDropdown.height > scrollTop + $document[0].documentElement.clientHeight) {
+                //Go UP
+                setDropdownPosUp(offset, offsetDropdown);
+              }else{
+                //Go DOWN
+                setDropdownPosDown(offset, offsetDropdown);
+              }
+            }
 
-              }else{ //AUTO
+            // Display the dropdown once it has been positioned.
+            dropdown[0].style.opacity = 1;
+          });
+        };
 
-                element.removeClass(directionUpClassName);
+        var opened = false;
 
-                var offset = uisOffset(element);
-                var offsetDropdown = uisOffset(dropdown);
+        scope.calculateDropdownPos = function() {
+          if ($select.open) {
+            dropdown = angular.element(element).querySelectorAll('.ui-select-dropdown');
 
-                //https://code.google.com/p/chromium/issues/detail?id=342307#c4
-                var scrollTop = $document[0].documentElement.scrollTop || $document[0].body.scrollTop; //To make it cross browser (blink, webkit, IE, Firefox).
+            if (dropdown.length === 0) {
+              return;
+            }
 
-                // Determine if the direction of the dropdown needs to be changed.
-                if (offset.top + offset.height + offsetDropdown.height > scrollTop + $document[0].documentElement.clientHeight) {
-                  //Go UP
-                  setDropdownPosUp(offset, offsetDropdown);
-                }else{
-                  //Go DOWN
-                  setDropdownPosDown(offset, offsetDropdown);
-                }
+           // Hide the dropdown so there is no flicker until $timeout is done executing.
+           if ($select.search === '' && !opened) {
+              dropdown[0].style.opacity = 0;
+              opened = true;
+           }
 
-              }
+            if (!uisOffset(dropdown).height && $select.$animate && $select.$animate.on && $select.$animate.enabled(dropdown)) {
+              var needsCalculated = true;
 
-              // Display the dropdown once it has been positioned.
-              dropdown[0].style.opacity = 1;
-            });
+              $select.$animate.on('enter', dropdown, function (elem, phase) {
+                if (phase === 'close' && needsCalculated) {
+                  calculateDropdownPosAfterAnimation();
+                  needsCalculated = false;
+                }
+              });
+            } else {
+              calculateDropdownPosAfterAnimation();
+            }
           } else {
-              if (dropdown === null || dropdown.length === 0) {
-                return;
-              }
+            if (dropdown === null || dropdown.length === 0) {
+              return;
+            }
 
-              // Reset the position of the dropdown.
-              dropdown[0].style.position = '';
-              dropdown[0].style.top = '';
-              element.removeClass(directionUpClassName);
+            // Reset the position of the dropdown.
+            dropdown[0].style.opacity = 0;
+            dropdown[0].style.position = '';
+            dropdown[0].style.top = '';
+            element.removeClass(directionUpClassName);
           }
         };
       };
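
For reference, the auto-direction logic above reduces to a single viewport test; a standalone sketch of that check (the function name and arguments are illustrative, not part of the patch):

    // Flip the dropdown upward only when its bottom edge would fall below the
    // visible viewport; expects uisOffset-style {top, height} objects.
    function shouldDropUp(offset, offsetDropdown, doc) {
      // Blink reports scrollTop on body, other engines on documentElement:
      // https://code.google.com/p/chromium/issues/detail?id=342307#c4
      var scrollTop = doc.documentElement.scrollTop || doc.body.scrollTop;
      return offset.top + offset.height + offsetDropdown.height >
             scrollTop + doc.documentElement.clientHeight;
    }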
@@ -1144,9 +1457,14 @@ uis.directive('uiSelectMatch', ['uiSelectConfig', function(uiSelectConfig) {
     replace: true,
     transclude: true,
     templateUrl: function(tElement) {
+      // Needed so the uiSelect can detect the transcluded content
+      tElement.addClass('ui-select-match');
+
+      var parent = tElement.parent();
       // Gets theme attribute from parent (ui-select)
-      var theme = tElement.parent().attr('theme') || uiSelectConfig.theme;
-      var multi = tElement.parent().attr('multiple');
+      var theme = getAttribute(parent, 'theme') || uiSelectConfig.theme;
+      var multi = angular.isDefined(getAttribute(parent, 'multiple'));
+
       return theme + (multi ? '/match-multiple.tpl.html' : '/match.tpl.html');
     },
     link: function(scope, element, attrs, $select) {
@@ -1168,6 +1486,17 @@ uis.directive('uiSelectMatch', ['uiSelectConfig', function(uiSelectConfig) {
 
     }
   };
+
+  function getAttribute(elem, attribute) {
+    if (elem[0].hasAttribute(attribute))
+      return elem.attr(attribute);
+
+    if (elem[0].hasAttribute('data-' + attribute))
+      return elem.attr('data-' + attribute);
+
+    if (elem[0].hasAttribute('x-' + attribute))
+      return elem.attr('x-' + attribute);
+  }
 }]);
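
The file-local getAttribute helper above makes the template lookup tolerant of Angular's data- and x- attribute prefixes; a hypothetical call pattern (the markup and values are illustrative):

    var parent = angular.element('<ui-select data-theme="select2" multiple></ui-select>');
    getAttribute(parent, 'theme');    // -> "select2", resolved via the data- prefix
    getAttribute(parent, 'multiple'); // -> "" here, but defined, so angular.isDefined(...) is true
    getAttribute(parent, 'tagging');  // -> undefined, attribute absent in every spelling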
 
 uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelectMinErr, $timeout) {
@@ -1181,6 +1510,9 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
           $select = $scope.$select,
           ngModel;
 
+      if (angular.isUndefined($select.selected))
+        $select.selected = [];
+
       //Wait for link fn to inject it
       $scope.$evalAsync(function(){ ngModel = $scope.ngModel; });
 
@@ -1195,17 +1527,21 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
         //Remove already selected items
         //e.g. When user clicks on a selection, the selected array changes and
         //the dropdown should remove that item
-        $select.refreshItems();
-        $select.sizeSearchInput();
+        if($select.refreshItems){
+          $select.refreshItems();
+        }
+        if($select.sizeSearchInput){
+          $select.sizeSearchInput();
+        }
       };
 
       // Remove item from multiple select
       ctrl.removeChoice = function(index){
 
-        var removedChoice = $select.selected[index];
+        // if the choice is locked, don't remove it
+        if($select.isLocked(null, index)) return false;
 
-        // if the choice is locked, can't remove it
-        if(removedChoice._uiSelectChoiceLocked) return;
+        var removedChoice = $select.selected[index];
 
         var locals = {};
         locals[$select.parserResult.itemName] = removedChoice;
@@ -1224,6 +1560,7 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
 
         ctrl.updateModel();
 
+        return true;
       };
 
       ctrl.getPlaceholder = function(){
@@ -1245,11 +1582,15 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
       //$select.selected = raw selected objects (ignoring any property binding)
 
       $select.multiple = true;
-      $select.removeSelected = true;
 
       //Input that will handle focus
       $select.focusInput = $select.searchInput;
 
+      //Properly check for empty if set to multiple
+      ngModel.$isEmpty = function(value) {
+        return !value || value.length === 0;
+      };
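          // Context (illustrative): Angular's default ngModel.$isEmpty treats an
          // empty array as non-empty, so ng-required on a multiple select would
          // pass with nothing selected. With the override above:
          //   ngModel.$isEmpty([])        -> true  (required validation still fails)
          //   ngModel.$isEmpty(['a'])     -> false
          //   ngModel.$isEmpty(undefined) -> true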
+
       //From view --> model
       ngModel.$parsers.unshift(function () {
         var locals = {},
@@ -1266,7 +1607,7 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
 
       // From model --> view
       ngModel.$formatters.unshift(function (inputValue) {
-        var data = $select.parserResult.source (scope, { $select : {search:''}}), //Overwrite $search
+        var data = $select.parserResult && $select.parserResult.source (scope, { $select : {search:''}}), //Overwrite $search
             locals = {},
             result;
         if (!data) return inputValue;
@@ -1277,10 +1618,13 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
             locals[$select.parserResult.itemName] = list[p];
             result = $select.parserResult.modelMapper(scope, locals);
             if($select.parserResult.trackByExp){
-                var matches = /\.(.+)/.exec($select.parserResult.trackByExp);
-                if(matches.length>0 && result[matches[1]] == value[matches[1]]){
-                    resultMultiple.unshift(list[p]);
-                    return true;
+                var propsItemNameMatches = /(\w*)\./.exec($select.parserResult.trackByExp);
+                var matches = /\.([^\s]+)/.exec($select.parserResult.trackByExp);
+                if(propsItemNameMatches && propsItemNameMatches.length > 0 && propsItemNameMatches[1] == $select.parserResult.itemName){
+                  if(matches && matches.length>0 && result[matches[1]] == value[matches[1]]){
+                      resultMultiple.unshift(list[p]);
+                      return true;
+                  }
                 }
             }
             if (angular.equals(result.toUpperCase(),value.toUpperCase())){
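
A quick trace of the stricter track-by matching above, assuming the repeat expression 'person in people track by person.id' (names are illustrative):

    /(\w*)\./.exec('person.id');    // -> ['person.', 'person'], must equal itemName
    /\.([^\s]+)/.exec('person.id'); // -> ['.id', 'id'], the compared property
    // Entries then match on result.id == value.id, but only when the track-by
    // expression is rooted at the repeat item; the old /\.(.+)/ version never
    // checked the root and threw when the expression contained no dot at all.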
@@ -1307,7 +1651,10 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
       //Watch for external model changes
       scope.$watchCollection(function(){ return ngModel.$modelValue; }, function(newValue, oldValue) {
         if (oldValue != newValue){
-          ngModel.$modelValue = null; //Force scope model value and ngModel value to be out of sync to re-run formatters
+          //update the view value with fresh data from items, if there is a valid model value
+          if(angular.isDefined(ngModel.$modelValue)) {
+            ngModel.$modelValue = null; //Force scope model value and ngModel value to be out of sync to re-run formatters
+          }
           $selectMultiple.refreshComponent();
         }
       });
@@ -1317,12 +1664,13 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
         if(!angular.isArray(ngModel.$viewValue)){
           // Have tolerance for null or undefined values
           if(angular.isUndefined(ngModel.$viewValue) || ngModel.$viewValue === null){
-            $select.selected = [];
+            ngModel.$viewValue = [];
           } else {
             throw uiSelectMinErr('multiarr', "Expected model value to be array but got '{0}'", ngModel.$viewValue);
           }
         }
         $select.selected = ngModel.$viewValue;
+        $selectMultiple.refreshComponent();
         scope.$evalAsync(); //To force $digest
       };
 
@@ -1400,11 +1748,16 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
             case KEY.BACKSPACE:
               // Remove selected item and select previous/first
               if(~$selectMultiple.activeMatchIndex){
-                $selectMultiple.removeChoice(curr);
-                return prev;
+                if($selectMultiple.removeChoice(curr)) {
+                  return prev;
+                } else {
+                  return curr;
+                }
+
+              } else {
+                // If nothing yet selected, select last item
+                return last;
               }
-              // Select last item
-              else return last;
               break;
             case KEY.DELETE:
               // Remove selected item and select next item
@@ -1465,12 +1818,22 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
               stashArr = stashArr.slice(1,stashArr.length);
             }
             newItem = $select.tagging.fct($select.search);
-            newItem.isTag = true;
-            // verify the the tag doesn't match the value of an existing item
-            if ( stashArr.filter( function (origItem) { return angular.equals( origItem, $select.tagging.fct($select.search) ); } ).length > 0 ) {
+            // verify the new tag doesn't match the value of a possible selection choice or an already selected item.
+            if (
+              stashArr.some(function (origItem) {
+                 return angular.equals(origItem, newItem);
+              }) ||
+              $select.selected.some(function (origItem) {
+                return angular.equals(origItem, newItem);
+              })
+            ) {
+              scope.$evalAsync(function () {
+                $select.activeIndex = 0;
+                $select.items = items;
+              });
               return;
             }
-            newItem.isTag = true;
+            if (newItem) newItem.isTag = true;
           // handle newItem string and stripping dupes in tagging string context
           } else {
             // find any tagging items already in the $select.items array and store them
@@ -1519,12 +1882,23 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
             items = items.slice(dupeIndex+1,items.length-1);
           } else {
             items = [];
-            items.push(newItem);
+            if (newItem) items.push(newItem);
             items = items.concat(stashArr);
           }
           scope.$evalAsync( function () {
             $select.activeIndex = 0;
             $select.items = items;
+
+            if ($select.isGrouped) {
+              // update item references in groups, so that indexOf will work after angular.copy
+              var itemsWithoutTag = newItem ? items.slice(1) : items;
+              $select.setItemsFn(itemsWithoutTag);
+              if (newItem) {
+                // add tag item as a new group
+                $select.items.unshift(newItem);
+                $select.groups.unshift({name: '', items: [newItem], tagging: true});
+              }
+            }
           });
         }
       });
@@ -1555,9 +1929,11 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
             // handle the object tagging implementation
             } else {
               var mockObj = tempArr[i];
-              mockObj.isTag = true;
+              if (angular.isObject(mockObj)) {
+                mockObj.isTag = true;
+              }
               if ( angular.equals(mockObj, needle) ) {
-              dupeIndex = i;
+                dupeIndex = i;
               }
             }
           }
@@ -1575,6 +1951,24 @@ uis.directive('uiSelectMultiple', ['uiSelectMinErr','$timeout', function(uiSelec
   };
 }]);
 
+uis.directive('uiSelectNoChoice',
+    ['uiSelectConfig', function (uiSelectConfig) {
+        return {
+            restrict: 'EA',
+            require: '^uiSelect',
+            replace: true,
+            transclude: true,
+            templateUrl: function (tElement) {
+                // Needed so the uiSelect can detect the transcluded content
+                tElement.addClass('ui-select-no-choice');
+
+                // Gets theme attribute from parent (ui-select)
+                var theme = tElement.parent().attr('theme') || uiSelectConfig.theme;
+                return theme + '/no-choice.tpl.html';
+            }
+        };
+    }]);
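
The new uiSelectNoChoice directive renders its transcluded content whenever the filtered item list is empty. A hypothetical template wiring it up (model and field names are assumptions), shown as an Angular template string:

    var tpl =
      '<ui-select ng-model="vm.country">' +
      '  <ui-select-match placeholder="Pick a country">{{$select.selected.name}}</ui-select-match>' +
      '  <ui-select-choices repeat="c in vm.countries | filter: $select.search">' +
      '    <span ng-bind="c.name"></span>' +
      '  </ui-select-choices>' +
      '  <ui-select-no-choice>No matches found</ui-select-no-choice>' +
      '</ui-select>';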
+
 uis.directive('uiSelectSingle', ['$timeout','$compile', function($timeout, $compile) {
   return {
     restrict: 'EA',
@@ -1595,14 +1989,14 @@ uis.directive('uiSelectSingle', ['$timeout','$compile', function($timeout, $comp
 
       //From model --> view
       ngModel.$formatters.unshift(function (inputValue) {
-        var data = $select.parserResult.source (scope, { $select : {search:''}}), //Overwrite $search
+        var data = $select.parserResult && $select.parserResult.source (scope, { $select : {search:''}}), //Overwrite $search
             locals = {},
             result;
         if (data){
           var checkFnSingle = function(d){
             locals[$select.parserResult.itemName] = d;
             result = $select.parserResult.modelMapper(scope, locals);
-            return result == inputValue;
+            return result === inputValue;
           };
           //If possible pass same object stored in $select.selected
           if ($select.selected && checkFnSingle($select.selected)) {
@@ -1699,44 +2093,48 @@ uis.directive('uiSelectSingle', ['$timeout','$compile', function($timeout, $comp
     }
   };
 }]);
+
 // Make multiple matches sortable
 uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', function($timeout, uiSelectConfig, uiSelectMinErr) {
   return {
-    require: '^uiSelect',
-    link: function(scope, element, attrs, $select) {
+    require: ['^^uiSelect', '^ngModel'],
+    link: function(scope, element, attrs, ctrls) {
       if (scope[attrs.uiSelectSort] === null) {
-        throw uiSelectMinErr('sort', "Expected a list to sort");
+        throw uiSelectMinErr('sort', 'Expected a list to sort');
       }
 
+      var $select = ctrls[0];
+      var $ngModel = ctrls[1];
+
       var options = angular.extend({
           axis: 'horizontal'
         },
         scope.$eval(attrs.uiSelectSortOptions));
 
-      var axis = options.axis,
-        draggingClassName = 'dragging',
-        droppingClassName = 'dropping',
-        droppingBeforeClassName = 'dropping-before',
-        droppingAfterClassName = 'dropping-after';
+      var axis = options.axis;
+      var draggingClassName = 'dragging';
+      var droppingClassName = 'dropping';
+      var droppingBeforeClassName = 'dropping-before';
+      var droppingAfterClassName = 'dropping-after';
 
       scope.$watch(function(){
         return $select.sortable;
-      }, function(n){
-        if (n) {
+      }, function(newValue){
+        if (newValue) {
           element.attr('draggable', true);
         } else {
           element.removeAttr('draggable');
         }
       });
 
-      element.on('dragstart', function(e) {
+      element.on('dragstart', function(event) {
         element.addClass(draggingClassName);
 
-        (e.dataTransfer || e.originalEvent.dataTransfer).setData('text/plain', scope.$index);
+        (event.dataTransfer || event.originalEvent.dataTransfer).setData('text', scope.$index.toString());
       });
 
       element.on('dragend', function() {
-        element.removeClass(draggingClassName);
+        removeClass(draggingClassName);
       });
 
       var move = function(from, to) {
@@ -1744,27 +2142,33 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
         this.splice(to, 0, this.splice(from, 1)[0]);
       };
 
-      var dragOverHandler = function(e) {
-        e.preventDefault();
+      var removeClass = function(className) {
+        angular.forEach($select.$element.querySelectorAll('.' + className), function(el){
+          angular.element(el).removeClass(className);
+        });
+      };
 
-        var offset = axis === 'vertical' ? e.offsetY || e.layerY || (e.originalEvent ? e.originalEvent.offsetY : 0) : e.offsetX || e.layerX || (e.originalEvent ? e.originalEvent.offsetX : 0);
+      var dragOverHandler = function(event) {
+        event.preventDefault();
+
+        var offset = axis === 'vertical' ? event.offsetY || event.layerY || (event.originalEvent ? event.originalEvent.offsetY : 0) : event.offsetX || event.layerX || (event.originalEvent ? event.originalEvent.offsetX : 0);
 
         if (offset < (this[axis === 'vertical' ? 'offsetHeight' : 'offsetWidth'] / 2)) {
-          element.removeClass(droppingAfterClassName);
+          removeClass(droppingAfterClassName);
           element.addClass(droppingBeforeClassName);
 
         } else {
-          element.removeClass(droppingBeforeClassName);
+          removeClass(droppingBeforeClassName);
           element.addClass(droppingAfterClassName);
         }
       };
 
       var dropTimeout;
 
-      var dropHandler = function(e) {
-        e.preventDefault();
+      var dropHandler = function(event) {
+        event.preventDefault();
 
-        var droppedItemIndex = parseInt((e.dataTransfer || e.originalEvent.dataTransfer).getData('text/plain'), 10);
+        var droppedItemIndex = parseInt((event.dataTransfer || event.originalEvent.dataTransfer).getData('text'), 10);
 
         // prevent event firing multiple times in firefox
         $timeout.cancel(dropTimeout);
@@ -1774,9 +2178,9 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
       };
 
       var _dropHandler = function(droppedItemIndex) {
-        var theList = scope.$eval(attrs.uiSelectSort),
-          itemToMove = theList[droppedItemIndex],
-          newIndex = null;
+        var theList = scope.$eval(attrs.uiSelectSort);
+        var itemToMove = theList[droppedItemIndex];
+        var newIndex = null;
 
         if (element.hasClass(droppingBeforeClassName)) {
           if (droppedItemIndex < scope.$index) {
@@ -1794,6 +2198,8 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
 
         move.apply(theList, [droppedItemIndex, newIndex]);
 
+        $ngModel.$setViewValue(Date.now());
+
         scope.$apply(function() {
           scope.$emit('uiSelectSort:change', {
             array: theList,
@@ -1803,9 +2209,9 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
           });
         });
 
-        element.removeClass(droppingClassName);
-        element.removeClass(droppingBeforeClassName);
-        element.removeClass(droppingAfterClassName);
+        removeClass(droppingClassName);
+        removeClass(droppingBeforeClassName);
+        removeClass(droppingAfterClassName);
 
         element.off('drop', dropHandler);
       };
@@ -1821,13 +2227,14 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
         element.on('drop', dropHandler);
       });
 
-      element.on('dragleave', function(e) {
-        if (e.target != element) {
+      element.on('dragleave', function(event) {
+        if (event.target != element) {
           return;
         }
-        element.removeClass(droppingClassName);
-        element.removeClass(droppingBeforeClassName);
-        element.removeClass(droppingAfterClassName);
+
+        removeClass(droppingClassName);
+        removeClass(droppingBeforeClassName);
+        removeClass(droppingAfterClassName);
 
         element.off('dragover', dragOverHandler);
         element.off('drop', dropHandler);
@@ -1836,6 +2243,51 @@ uis.directive('uiSelectSort', ['$timeout', 'uiSelectConfig', 'uiSelectMinErr', f
   };
 }]);
 
+/**
+ * Debounces functions
+ *
+ * Taken from UI Bootstrap $$debounce source code
+ * See https://github.com/angular-ui/bootstrap/blob/master/src/debounce/debounce.js
+ *
+ */
+uis.factory('$$uisDebounce', ['$timeout', function($timeout) {
+  return function(callback, debounceTime) {
+    var timeoutPromise;
+
+    return function() {
+      var self = this;
+      var args = Array.prototype.slice.call(arguments);
+      if (timeoutPromise) {
+        $timeout.cancel(timeoutPromise);
+      }
+
+      timeoutPromise = $timeout(function() {
+        callback.apply(self, args);
+      }, debounceTime);
+    };
+  };
+}]);
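
A minimal usage sketch for the factory above (module, controller, and handler names are assumptions):

    angular.module('myApp', ['ui.select'])
      .controller('SearchCtrl', ['$scope', '$$uisDebounce',
        function($scope, $$uisDebounce) {
          // Collapses a burst of calls into one, fired 300 ms after the last.
          $scope.onSearch = $$uisDebounce(function(term) {
            console.log('searching for', term);
          }, 300);
        }]);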
+
+uis.directive('uisOpenClose', ['$parse', '$timeout', function ($parse, $timeout) {
+  return {
+    restrict: 'A',
+    require: 'uiSelect',
+    link: function (scope, element, attrs, $select) {
+      $select.onOpenCloseCallback = $parse(attrs.uisOpenClose);
+
+      scope.$watch('$select.open', function (isOpen, previousState) {
+        if (isOpen !== previousState) {
+          $timeout(function () {
+            $select.onOpenCloseCallback(scope, {
+              isOpen: isOpen
+            });
+          });
+        }
+      });
+    }
+  };
+}]);
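
A hypothetical hookup for the uis-open-close attribute above (controller and callback names are assumptions); the watched expression receives an isOpen local:

    // Markup: <ui-select ng-model="vm.item" uis-open-close="vm.onToggle(isOpen)">...</ui-select>
    angular.module('myApp').controller('ToggleCtrl', ['$scope', function($scope) {
      $scope.vm = {
        item: null,
        // Called inside a $timeout each time $select.open flips state.
        onToggle: function(isOpen) {
          console.log(isOpen ? 'dropdown opened' : 'dropdown closed');
        }
      };
    }]);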
+
 /**
  * Parses "repeat" attribute.
  *
@@ -1860,38 +2312,48 @@ uis.service('uisRepeatParser', ['uiSelectMinErr','$parse', function(uiSelectMinE
 
 
     var match;
-    var isObjectCollection = /\(\s*([\$\w][\$\w]*)\s*,\s*([\$\w][\$\w]*)\s*\)/.test(expression);
+    //var isObjectCollection = /\(\s*([\$\w][\$\w]*)\s*,\s*([\$\w][\$\w]*)\s*\)/.test(expression);
     // If an array is used as collection
 
     // if (isObjectCollection){
-      //00000000000000000000000000000111111111000000000000000222222222222220033333333333333333333330000444444444444444444000000000000000556666660000077777777777755000000000000000000000088888880000000
-    match = expression.match(/^\s*(?:([\s\S]+?)\s+as\s+)?(?:([\$\w][\$\w]*)|(?:\(\s*([\$\w][\$\w]*)\s*,\s*([\$\w][\$\w]*)\s*\)))\s+in\s+(([\w\.]+)?\s*(|\s*[\s\S]+?))?(?:\s+track\s+by\s+([\s\S]+?))?\s*$/);
+    // 000000000000000000000000000000111111111000000000000000222222222222220033333333333333333333330000444444444444444444000000000000000055555555555000000000000000000000066666666600000000
+    match = expression.match(/^\s*(?:([\s\S]+?)\s+as\s+)?(?:([\$\w][\$\w]*)|(?:\(\s*([\$\w][\$\w]*)\s*,\s*([\$\w][\$\w]*)\s*\)))\s+in\s+(\s*[\s\S]+?)?(?:\s+track\s+by\s+([\s\S]+?))?\s*$/);
 
     // 1 Alias
     // 2 Item
     // 3 Key on (key,value)
     // 4 Value on (key,value)
-    // 5 Collection expresion (only used when using an array collection)
-    // 6 Object that will be converted to Array when using (key,value) syntax
-    // 7 Filters that will be applied to #6 when using (key,value) syntax
-    // 8 Track by
+    // 5 Source expression (including filters)
+    // 6 Track by
 
     if (!match) {
       throw uiSelectMinErr('iexp', "Expected expression in form of '_item_ in _collection_[ track by _id_]' but got '{0}'.",
               expression);
     }
-    if (!match[6] && isObjectCollection) {
-      throw uiSelectMinErr('iexp', "Expected expression in form of '_item_ as (_key_, _item_) in _ObjCollection_ [ track by _id_]' but got '{0}'.",
-              expression);
+
+    var source = match[5],
+        filters = '';
+
+    // When using (key,value) ui-select requires filters to be extracted, since the object
+    // is converted to an array for $select.items
+    // (in which case the filters need to be reapplied)
+    if (match[3]) {
+      // Remove any enclosing parenthesis
+      source = match[5].replace(/(^\()|(\)$)/g, '');
+      // match all after | but not after ||
+      var filterMatch = match[5].match(/^\s*(?:[\s\S]+?)(?:[^\|]|\|\|)+([\s\S]*)\s*$/);
+      if(filterMatch && filterMatch[1].trim()) {
+        filters = filterMatch[1];
+        source = source.replace(filters, '');
+      }
     }
 
     return {
       itemName: match[4] || match[2], // (lhs) Left-hand side,
       keyName: match[3], //for (key, value) syntax
-      source: $parse(!match[3] ? match[5] : match[6]),
-      sourceName: match[6],
-      filters: match[7],
-      trackByExp: match[8],
+      source: $parse(source),
+      filters: filters,
+      trackByExp: match[6],
       modelMapper: $parse(match[1] || match[4] || match[2]),
       repeatExpression: function (grouped) {
         var expression = this.itemName + ' in ' + (grouped ? '$group.items' : '$select.items');
@@ -1905,22 +2367,27 @@ uis.service('uisRepeatParser', ['uiSelectMinErr','$parse', function(uiSelectMinE
   };
 
   self.getGroupNgRepeatExpression = function() {
-    return '$group in $select.groups';
+    return '$group in $select.groups track by $group.name';
   };
 
 }]);
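
To make the regrouped captures concrete, here is roughly what parse() now returns for an assumed expression (names are illustrative; uisRepeatParser is assumed already injected):

    var parsed = uisRepeatParser.parse(
      'person.name as person in people | filter: $select.search track by person.id');
    // parsed.itemName    === 'person'
    // parsed.trackByExp  === 'person.id'
    // parsed.filters     === ''  (filters stay inside source for array collections)
    // parsed.source      evaluates 'people | filter: $select.search' on the scope
    // parsed.modelMapper evaluates 'person.name' against {person: item} locals
    // With '(key, person) in someObj | someFilter' syntax, the filters are split
    // out instead, so they can be reapplied after the object becomes an array.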
 
 }());
-angular.module("ui.select").run(["$templateCache", function($templateCache) {$templateCache.put("bootstrap/choices.tpl.html","<ul class=\"ui-select-choices ui-select-choices-content ui-select-dropdown dropdown-menu\" role=\"listbox\" ng-show=\"$select.items.length > 0\"><li class=\"ui-select-choices-group\" id=\"ui-select-choices-{{ $select.generatedId }}\"><div class=\"divider\" ng-show=\"$select.isGrouped && $index > 0\"></div><div ng-show=\"$select.isGrouped\" class=\"ui-select-choices-group-label dropdown-header\" ng-bind=\"$group.name\"></div><div id=\"ui-select-choices-row-{{ $select.generatedId }}-{{$index}}\" class=\"ui-select-choices-row\" ng-class=\"{active: $select.isActive(this), disabled: $select.isDisabled(this)}\" role=\"option\"><a href=\"javascript:void(0)\" class=\"ui-select-choices-row-inner\"></a></div></li></ul>");
-$templateCache.put("bootstrap/match-multiple.tpl.html","<span class=\"ui-select-match\"><span ng-repeat=\"$item in $select.selected\"><span class=\"ui-select-match-item btn btn-default btn-xs\" tabindex=\"-1\" type=\"button\" ng-disabled=\"$select.disabled\" ng-click=\"$selectMultiple.activeMatchIndex = $index;\" ng-class=\"{\'btn-primary\':$selectMultiple.activeMatchIndex === $index, \'select-locked\':$select.isLocked(this, $index)}\" ui-select-sort=\"$select.selected\"><span class=\"close ui-select-match-close\" ng-hide=\"$select.disabled\" ng-click=\"$selectMultiple.removeChoice($index)\">&nbsp;&times;</span> <span uis-transclude-append=\"\"></span></span></span></span>");
-$templateCache.put("bootstrap/match.tpl.html","<div class=\"ui-select-match\" ng-hide=\"$select.open\" ng-disabled=\"$select.disabled\" ng-class=\"{\'btn-default-focus\':$select.focus}\"><span tabindex=\"-1\" class=\"btn btn-default form-control ui-select-toggle\" aria-label=\"{{ $select.baseTitle }} activate\" ng-disabled=\"$select.disabled\" ng-click=\"$select.activate()\" style=\"outline: 0;\"><span ng-show=\"$select.isEmpty()\" class=\"ui-select-placeholder text-muted\">{{$select.placeholder}}</span> <span ng-hide=\"$select.isEmpty()\" class=\"ui-select-match-text pull-left\" ng-class=\"{\'ui-select-allow-clear\': $select.allowClear && !$select.isEmpty()}\" ng-transclude=\"\"></span> <i class=\"caret pull-right\" ng-click=\"$select.toggle($event)\"></i> <a ng-show=\"$select.allowClear && !$select.isEmpty()\" aria-label=\"{{ $select.baseTitle }} clear\" style=\"margin-right: 10px\" ng-click=\"$select.clear($event)\" class=\"btn btn-xs btn-link pull-right\"><i class=\"glyphicon gl
 yphicon-remove\" aria-hidden=\"true\"></i></a></span></div>");
-$templateCache.put("bootstrap/select-multiple.tpl.html","<div class=\"ui-select-container ui-select-multiple ui-select-bootstrap dropdown form-control\" ng-class=\"{open: $select.open}\"><div><div class=\"ui-select-match\"></div><input type=\"text\" autocomplete=\"false\" autocorrect=\"off\" autocapitalize=\"off\" spellcheck=\"false\" class=\"ui-select-search input-xs\" placeholder=\"{{$selectMultiple.getPlaceholder()}}\" ng-disabled=\"$select.disabled\" ng-hide=\"$select.disabled\" ng-click=\"$select.activate()\" ng-model=\"$select.search\" role=\"combobox\" aria-label=\"{{ $select.baseTitle }}\" ondrop=\"return false;\"></div><div class=\"ui-select-choices\"></div></div>");
-$templateCache.put("bootstrap/select.tpl.html","<div class=\"ui-select-container ui-select-bootstrap dropdown\" ng-class=\"{open: $select.open}\"><div class=\"ui-select-match\"></div><input type=\"text\" autocomplete=\"false\" tabindex=\"-1\" aria-expanded=\"true\" aria-label=\"{{ $select.baseTitle }}\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" aria-activedescendant=\"ui-select-choices-row-{{ $select.generatedId }}-{{ $select.activeIndex }}\" class=\"form-control ui-select-search\" placeholder=\"{{$select.placeholder}}\" ng-model=\"$select.search\" ng-show=\"$select.searchEnabled && $select.open\"><div class=\"ui-select-choices\"></div></div>");
-$templateCache.put("selectize/choices.tpl.html","<div ng-show=\"$select.open\" class=\"ui-select-choices ui-select-dropdown selectize-dropdown single\"><div class=\"ui-select-choices-content selectize-dropdown-content\"><div class=\"ui-select-choices-group optgroup\" role=\"listbox\"><div ng-show=\"$select.isGrouped\" class=\"ui-select-choices-group-label optgroup-header\" ng-bind=\"$group.name\"></div><div role=\"option\" class=\"ui-select-choices-row\" ng-class=\"{active: $select.isActive(this), disabled: $select.isDisabled(this)}\"><div class=\"option ui-select-choices-row-inner\" data-selectable=\"\"></div></div></div></div></div>");
-$templateCache.put("selectize/match.tpl.html","<div ng-hide=\"($select.open || $select.isEmpty())\" class=\"ui-select-match\" ng-transclude=\"\"></div>");
-$templateCache.put("selectize/select.tpl.html","<div class=\"ui-select-container selectize-control single\" ng-class=\"{\'open\': $select.open}\"><div class=\"selectize-input\" ng-class=\"{\'focus\': $select.open, \'disabled\': $select.disabled, \'selectize-focus\' : $select.focus}\" ng-click=\"$select.activate()\"><div class=\"ui-select-match\"></div><input type=\"text\" autocomplete=\"false\" tabindex=\"-1\" class=\"ui-select-search ui-select-toggle\" ng-click=\"$select.toggle($event)\" placeholder=\"{{$select.placeholder}}\" ng-model=\"$select.search\" ng-hide=\"!$select.searchEnabled || ($select.selected && !$select.open)\" ng-disabled=\"$select.disabled\" aria-label=\"{{ $select.baseTitle }}\"></div><div class=\"ui-select-choices\"></div></div>");
-$templateCache.put("select2/choices.tpl.html","<ul class=\"ui-select-choices ui-select-choices-content select2-results\"><li class=\"ui-select-choices-group\" ng-class=\"{\'select2-result-with-children\': $select.choiceGrouped($group) }\"><div ng-show=\"$select.choiceGrouped($group)\" class=\"ui-select-choices-group-label select2-result-label\" ng-bind=\"$group.name\"></div><ul role=\"listbox\" id=\"ui-select-choices-{{ $select.generatedId }}\" ng-class=\"{\'select2-result-sub\': $select.choiceGrouped($group), \'select2-result-single\': !$select.choiceGrouped($group) }\"><li role=\"option\" id=\"ui-select-choices-row-{{ $select.generatedId }}-{{$index}}\" class=\"ui-select-choices-row\" ng-class=\"{\'select2-highlighted\': $select.isActive(this), \'select2-disabled\': $select.isDisabled(this)}\"><div class=\"select2-result-label ui-select-choices-row-inner\"></div></li></ul></li></ul>");
-$templateCache.put("select2/match-multiple.tpl.html","<span class=\"ui-select-match\"><li class=\"ui-select-match-item select2-search-choice\" ng-repeat=\"$item in $select.selected\" ng-class=\"{\'select2-search-choice-focus\':$selectMultiple.activeMatchIndex === $index, \'select2-locked\':$select.isLocked(this, $index)}\" ui-select-sort=\"$select.selected\"><span uis-transclude-append=\"\"></span> <a href=\"javascript:;\" class=\"ui-select-match-close select2-search-choice-close\" ng-click=\"$selectMultiple.removeChoice($index)\" tabindex=\"-1\"></a></li></span>");
+angular.module("ui.select").run(["$templateCache", function($templateCache) {$templateCache.put("bootstrap/choices.tpl.html","<ul class=\"ui-select-choices ui-select-choices-content ui-select-dropdown dropdown-menu\" ng-show=\"$select.open && $select.items.length > 0\"><li class=\"ui-select-choices-group\" id=\"ui-select-choices-{{ $select.generatedId }}\"><div class=\"divider\" ng-show=\"$select.isGrouped && $index > 0\"></div><div ng-show=\"$select.isGrouped\" class=\"ui-select-choices-group-label dropdown-header\" ng-bind=\"$group.name\"></div><div ng-attr-id=\"ui-select-choices-row-{{ $select.generatedId }}-{{$index}}\" class=\"ui-select-choices-row\" ng-class=\"{active: $select.isActive(this), disabled: $select.isDisabled(this)}\" role=\"option\"><span class=\"ui-select-choices-row-inner\"></span></div></li></ul>");
+$templateCache.put("bootstrap/match-multiple.tpl.html","<span class=\"ui-select-match\"><span ng-repeat=\"$item in $select.selected track by $index\"><span class=\"ui-select-match-item btn btn-default btn-xs\" tabindex=\"-1\" type=\"button\" ng-disabled=\"$select.disabled\" ng-click=\"$selectMultiple.activeMatchIndex = $index;\" ng-class=\"{\'btn-primary\':$selectMultiple.activeMatchIndex === $index, \'select-locked\':$select.isLocked(this, $index)}\" ui-select-sort=\"$select.selected\"><span class=\"close ui-select-match-close\" ng-hide=\"$select.disabled\" ng-click=\"$selectMultiple.removeChoice($index)\">&nbsp;&times;</span> <span uis-transclude-append=\"\"></span></span></span></span>");
+$templateCache.put("bootstrap/match.tpl.html","<div class=\"ui-select-match\" ng-hide=\"$select.open && $select.searchEnabled\" ng-disabled=\"$select.disabled\" ng-class=\"{\'btn-default-focus\':$select.focus}\"><span tabindex=\"-1\" class=\"btn btn-default form-control ui-select-toggle\" aria-label=\"{{ $select.baseTitle }} activate\" ng-disabled=\"$select.disabled\" ng-click=\"$select.activate()\" style=\"outline: 0;\"><span ng-show=\"$select.isEmpty()\" class=\"ui-select-placeholder text-muted\">{{$select.placeholder}}</span> <span ng-hide=\"$select.isEmpty()\" class=\"ui-select-match-text pull-left\" ng-class=\"{\'ui-select-allow-clear\': $select.allowClear && !$select.isEmpty()}\" ng-transclude=\"\"></span> <i class=\"caret pull-right\" ng-click=\"$select.toggle($event)\"></i> <a ng-show=\"$select.allowClear && !$select.isEmpty() && ($select.disabled !== true)\" aria-label=\"{{ $select.baseTitle }} clear\" style=\"margin-right: 10px\" ng-click=\"$select.clear($event)\" class=\"
 btn btn-xs btn-link pull-right\"><i class=\"glyphicon glyphicon-remove\" aria-hidden=\"true\"></i></a></span></div>");
+$templateCache.put("bootstrap/no-choice.tpl.html","<ul class=\"ui-select-no-choice dropdown-menu\" ng-show=\"$select.items.length == 0\"><li ng-transclude=\"\"></li></ul>");
+$templateCache.put("bootstrap/select-multiple.tpl.html","<div class=\"ui-select-container ui-select-multiple ui-select-bootstrap dropdown form-control\" ng-class=\"{open: $select.open}\"><div><div class=\"ui-select-match\"></div><input type=\"search\" autocomplete=\"off\" autocorrect=\"off\" autocapitalize=\"off\" spellcheck=\"false\" class=\"ui-select-search input-xs\" placeholder=\"{{$selectMultiple.getPlaceholder()}}\" ng-disabled=\"$select.disabled\" ng-click=\"$select.activate()\" ng-model=\"$select.search\" role=\"combobox\" aria-expanded=\"{{$select.open}}\" aria-label=\"{{$select.baseTitle}}\" ng-class=\"{\'spinner\': $select.refreshing}\" ondrop=\"return false;\"></div><div class=\"ui-select-choices\"></div><div class=\"ui-select-no-choice\"></div></div>");
+$templateCache.put("bootstrap/select.tpl.html","<div class=\"ui-select-container ui-select-bootstrap dropdown\" ng-class=\"{open: $select.open}\"><div class=\"ui-select-match\"></div><span ng-show=\"$select.open && $select.refreshing && $select.spinnerEnabled\" class=\"ui-select-refreshing {{$select.spinnerClass}}\"></span> <input type=\"search\" autocomplete=\"off\" tabindex=\"-1\" aria-expanded=\"true\" aria-label=\"{{ $select.baseTitle }}\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" class=\"form-control ui-select-search\" ng-class=\"{ \'ui-select-search-hidden\' : !$select.searchEnabled }\" placeholder=\"{{$select.placeholder}}\" ng-model=\"$select.search\" ng-show=\"$select.open\"><div class=\"ui-select-choices\"></div><div class=\"ui-select-no-choice\"></div></div>");
+$templateCache.put("select2/choices.tpl.html","<ul tabindex=\"-1\" class=\"ui-select-choices ui-select-choices-content select2-results\"><li class=\"ui-select-choices-group\" ng-class=\"{\'select2-result-with-children\': $select.choiceGrouped($group) }\"><div ng-show=\"$select.choiceGrouped($group)\" class=\"ui-select-choices-group-label select2-result-label\" ng-bind=\"$group.name\"></div><ul id=\"ui-select-choices-{{ $select.generatedId }}\" ng-class=\"{\'select2-result-sub\': $select.choiceGrouped($group), \'select2-result-single\': !$select.choiceGrouped($group) }\"><li role=\"option\" ng-attr-id=\"ui-select-choices-row-{{ $select.generatedId }}-{{$index}}\" class=\"ui-select-choices-row\" ng-class=\"{\'select2-highlighted\': $select.isActive(this), \'select2-disabled\': $select.isDisabled(this)}\"><div class=\"select2-result-label ui-select-choices-row-inner\"></div></li></ul></li></ul>");
+$templateCache.put("select2/match-multiple.tpl.html","<span class=\"ui-select-match\"><li class=\"ui-select-match-item select2-search-choice\" ng-repeat=\"$item in $select.selected track by $index\" ng-class=\"{\'select2-search-choice-focus\':$selectMultiple.activeMatchIndex === $index, \'select2-locked\':$select.isLocked(this, $index)}\" ui-select-sort=\"$select.selected\"><span uis-transclude-append=\"\"></span> <a href=\"javascript:;\" class=\"ui-select-match-close select2-search-choice-close\" ng-click=\"$selectMultiple.removeChoice($index)\" tabindex=\"-1\"></a></li></span>");
 $templateCache.put("select2/match.tpl.html","<a class=\"select2-choice ui-select-match\" ng-class=\"{\'select2-default\': $select.isEmpty()}\" ng-click=\"$select.toggle($event)\" aria-label=\"{{ $select.baseTitle }} select\"><span ng-show=\"$select.isEmpty()\" class=\"select2-chosen\">{{$select.placeholder}}</span> <span ng-hide=\"$select.isEmpty()\" class=\"select2-chosen\" ng-transclude=\"\"></span> <abbr ng-if=\"$select.allowClear && !$select.isEmpty()\" class=\"select2-search-choice-close\" ng-click=\"$select.clear($event)\"></abbr> <span class=\"select2-arrow ui-select-toggle\"><b></b></span></a>");
-$templateCache.put("select2/select-multiple.tpl.html","<div class=\"ui-select-container ui-select-multiple select2 select2-container select2-container-multi\" ng-class=\"{\'select2-container-active select2-dropdown-open open\': $select.open, \'select2-container-disabled\': $select.disabled}\"><ul class=\"select2-choices\"><span class=\"ui-select-match\"></span><li class=\"select2-search-field\"><input type=\"text\" autocomplete=\"false\" autocorrect=\"off\" autocapitalize=\"off\" spellcheck=\"false\" role=\"combobox\" aria-expanded=\"true\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" aria-label=\"{{ $select.baseTitle }}\" aria-activedescendant=\"ui-select-choices-row-{{ $select.generatedId }}-{{ $select.activeIndex }}\" class=\"select2-input ui-select-search\" placeholder=\"{{$selectMultiple.getPlaceholder()}}\" ng-disabled=\"$select.disabled\" ng-hide=\"$select.disabled\" ng-model=\"$select.search\" ng-click=\"$select.activate()\" style=\"width: 34px;\" ondrop=\"retur
 n false;\"></li></ul><div class=\"ui-select-dropdown select2-drop select2-with-searchbox select2-drop-active\" ng-class=\"{\'select2-display-none\': !$select.open}\"><div class=\"ui-select-choices\"></div></div></div>");
-$templateCache.put("select2/select.tpl.html","<div class=\"ui-select-container select2 select2-container\" ng-class=\"{\'select2-container-active select2-dropdown-open open\': $select.open, \'select2-container-disabled\': $select.disabled, \'select2-container-active\': $select.focus, \'select2-allowclear\': $select.allowClear && !$select.isEmpty()}\"><div class=\"ui-select-match\"></div><div class=\"ui-select-dropdown select2-drop select2-with-searchbox select2-drop-active\" ng-class=\"{\'select2-display-none\': !$select.open}\"><div class=\"select2-search\" ng-show=\"$select.searchEnabled\"><input type=\"text\" autocomplete=\"false\" autocorrect=\"false\" autocapitalize=\"off\" spellcheck=\"false\" role=\"combobox\" aria-expanded=\"true\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" aria-label=\"{{ $select.baseTitle }}\" aria-activedescendant=\"ui-select-choices-row-{{ $select.generatedId }}-{{ $select.activeIndex }}\" class=\"ui-select-search select2-input\" ng-model=
 \"$select.search\"></div><div class=\"ui-select-choices\"></div></div></div>");}]);
+$templateCache.put("select2/no-choice.tpl.html","<div class=\"ui-select-no-choice dropdown\" ng-show=\"$select.items.length == 0\"><div class=\"dropdown-content\"><div data-selectable=\"\" ng-transclude=\"\"></div></div></div>");
+$templateCache.put("select2/select-multiple.tpl.html","<div class=\"ui-select-container ui-select-multiple select2 select2-container select2-container-multi\" ng-class=\"{\'select2-container-active select2-dropdown-open open\': $select.open, \'select2-container-disabled\': $select.disabled}\"><ul class=\"select2-choices\"><span class=\"ui-select-match\"></span><li class=\"select2-search-field\"><input type=\"search\" autocomplete=\"off\" autocorrect=\"off\" autocapitalize=\"off\" spellcheck=\"false\" role=\"combobox\" aria-expanded=\"true\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" aria-label=\"{{ $select.baseTitle }}\" aria-activedescendant=\"ui-select-choices-row-{{ $select.generatedId }}-{{ $select.activeIndex }}\" class=\"select2-input ui-select-search\" placeholder=\"{{$selectMultiple.getPlaceholder()}}\" ng-disabled=\"$select.disabled\" ng-hide=\"$select.disabled\" ng-model=\"$select.search\" ng-click=\"$select.activate()\" style=\"width: 34px;\" ondrop=\"retur
 n false;\"></li></ul><div class=\"ui-select-dropdown select2-drop select2-with-searchbox select2-drop-active\" ng-class=\"{\'select2-display-none\': !$select.open || $select.items.length === 0}\"><div class=\"ui-select-choices\"></div></div></div>");
+$templateCache.put("select2/select.tpl.html","<div class=\"ui-select-container select2 select2-container\" ng-class=\"{\'select2-container-active select2-dropdown-open open\': $select.open, \'select2-container-disabled\': $select.disabled, \'select2-container-active\': $select.focus, \'select2-allowclear\': $select.allowClear && !$select.isEmpty()}\"><div class=\"ui-select-match\"></div><div class=\"ui-select-dropdown select2-drop select2-with-searchbox select2-drop-active\" ng-class=\"{\'select2-display-none\': !$select.open}\"><div class=\"search-container\" ng-class=\"{\'ui-select-search-hidden\':!$select.searchEnabled, \'select2-search\':$select.searchEnabled}\"><input type=\"search\" autocomplete=\"off\" autocorrect=\"off\" autocapitalize=\"off\" spellcheck=\"false\" ng-class=\"{\'select2-active\': $select.refreshing}\" role=\"combobox\" aria-expanded=\"true\" aria-owns=\"ui-select-choices-{{ $select.generatedId }}\" aria-label=\"{{ $select.baseTitle }}\" class=\"ui-select-sear
 ch select2-input\" ng-model=\"$select.search\"></div><div class=\"ui-select-choices\"></div><div class=\"ui-select-no-choice\"></div></div></div>");
+$templateCache.put("selectize/choices.tpl.html","<div ng-show=\"$select.open\" class=\"ui-select-choices ui-select-dropdown selectize-dropdown\" ng-class=\"{\'single\': !$select.multiple, \'multi\': $select.multiple}\"><div class=\"ui-select-choices-content selectize-dropdown-content\"><div class=\"ui-select-choices-group optgroup\"><div ng-show=\"$select.isGrouped\" class=\"ui-select-choices-group-label optgroup-header\" ng-bind=\"$group.name\"></div><div role=\"option\" class=\"ui-select-choices-row\" ng-class=\"{active: $select.isActive(this), disabled: $select.isDisabled(this)}\"><div class=\"option ui-select-choices-row-inner\" data-selectable=\"\"></div></div></div></div></div>");
+$templateCache.put("selectize/match-multiple.tpl.html","<div class=\"ui-select-match\" data-value=\"\" ng-repeat=\"$item in $select.selected track by $index\" ng-click=\"$selectMultiple.activeMatchIndex = $index;\" ng-class=\"{\'active\':$selectMultiple.activeMatchIndex === $index}\" ui-select-sort=\"$select.selected\"><span class=\"ui-select-match-item\" ng-class=\"{\'select-locked\':$select.isLocked(this, $index)}\"><span uis-transclude-append=\"\"></span> <span class=\"remove ui-select-match-close\" ng-hide=\"$select.disabled\" ng-click=\"$selectMultiple.removeChoice($index)\">&times;</span></span></div>");
+$templateCache.put("selectize/match.tpl.html","<div ng-hide=\"$select.searchEnabled && ($select.open || $select.isEmpty())\" class=\"ui-select-match\"><span ng-show=\"!$select.searchEnabled && ($select.isEmpty() || $select.open)\" class=\"ui-select-placeholder text-muted\">{{$select.placeholder}}</span> <span ng-hide=\"$select.isEmpty() || $select.open\" ng-transclude=\"\"></span></div>");
+$templateCache.put("selectize/no-choice.tpl.html","<div class=\"ui-select-no-choice selectize-dropdown\" ng-show=\"$select.items.length == 0\"><div class=\"selectize-dropdown-content\"><div data-selectable=\"\" ng-transclude=\"\"></div></div></div>");
+$templateCache.put("selectize/select-multiple.tpl.html","<div class=\"ui-select-container selectize-control multi plugin-remove_button\" ng-class=\"{\'open\': $select.open}\"><div class=\"selectize-input\" ng-class=\"{\'focus\': $select.open, \'disabled\': $select.disabled, \'selectize-focus\' : $select.focus}\" ng-click=\"$select.open && !$select.searchEnabled ? $select.toggle($event) : $select.activate()\"><div class=\"ui-select-match\"></div><input type=\"search\" autocomplete=\"off\" tabindex=\"-1\" class=\"ui-select-search\" ng-class=\"{\'ui-select-search-hidden\':!$select.searchEnabled}\" placeholder=\"{{$selectMultiple.getPlaceholder()}}\" ng-model=\"$select.search\" ng-disabled=\"$select.disabled\" aria-expanded=\"{{$select.open}}\" aria-label=\"{{ $select.baseTitle }}\" ondrop=\"return false;\"></div><div class=\"ui-select-choices\"></div><div class=\"ui-select-no-choice\"></div></div>");
+$templateCache.put("selectize/select.tpl.html","<div class=\"ui-select-container selectize-control single\" ng-class=\"{\'open\': $select.open}\"><div class=\"selectize-input\" ng-class=\"{\'focus\': $select.open, \'disabled\': $select.disabled, \'selectize-focus\' : $select.focus}\" ng-click=\"$select.open && !$select.searchEnabled ? $select.toggle($event) : $select.activate()\"><div class=\"ui-select-match\"></div><input type=\"search\" autocomplete=\"off\" tabindex=\"-1\" class=\"ui-select-search ui-select-toggle\" ng-class=\"{\'ui-select-search-hidden\':!$select.searchEnabled}\" ng-click=\"$select.toggle($event)\" placeholder=\"{{$select.placeholder}}\" ng-model=\"$select.search\" ng-hide=\"!$select.isEmpty() && !$select.open\" ng-disabled=\"$select.disabled\" aria-label=\"{{ $select.baseTitle }}\"></div><div class=\"ui-select-choices\"></div><div class=\"ui-select-no-choice\"></div></div>");}]);
diff --git a/webapp/app/js/model/cubeConfig.js b/webapp/app/js/model/cubeConfig.js
index a77ab4f2a8..87413dcfd9 100644
--- a/webapp/app/js/model/cubeConfig.js
+++ b/webapp/app/js/model/cubeConfig.js
@@ -104,5 +104,8 @@ KylinApp.constant('cubeConfig', {
   rowKeyShardOptions:[
     true,false
   ],
-  statusNeedNofity:['ERROR', 'DISCARDED', 'SUCCEED']
+  statusNeedNofity:['ERROR', 'DISCARDED', 'SUCCEED'],
+  buildDictionaries:[
+    {name:"Global Dictionary", value:"org.apache.kylin.dict.GlobalDictionaryBuilder"}
+  ]
 });
diff --git a/webapp/app/js/services/cubes.js b/webapp/app/js/services/cubes.js
index 953f157911..de60f55d51 100644
--- a/webapp/app/js/services/cubes.js
+++ b/webapp/app/js/services/cubes.js
@@ -26,6 +26,7 @@ KylinApp.factory('CubeService', ['$resource', function ($resource, config) {
     cost: {method: 'PUT', params: {action: 'cost'}, isArray: false},
     rebuildLookUp: {method: 'PUT', params: {propName: 'segs', action: 'refresh_lookup'}, isArray: false},
     rebuildCube: {method: 'PUT', params: {action: 'rebuild'}, isArray: false},
+    rebuildStreamingCube: {method: 'PUT', params: {action: 'build2'}, isArray: false},
     disable: {method: 'PUT', params: {action: 'disable'}, isArray: false},
     enable: {method: 'PUT', params: {action: 'enable'}, isArray: false},
     purge: {method: 'PUT', params: {action: 'purge'}, isArray: false},
diff --git a/webapp/app/js/services/graph.js b/webapp/app/js/services/graph.js
index b191c50b3f..dc6951924d 100644
--- a/webapp/app/js/services/graph.js
+++ b/webapp/app/js/services/graph.js
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-KylinApp.service('GraphService', function (GraphBuilder) {
+KylinApp.service('GraphService', function (GraphBuilder, VdmUtil) {
 
   this.buildGraph = function (query) {
     var graphData = null;
@@ -27,6 +27,14 @@ KylinApp.service('GraphService', function (GraphBuilder) {
       metricsList = metricsList.concat(query.graph.state.metrics);
       angular.forEach(metricsList, function (metrics, index) {
         var aggregatedData = {};
+        angular.forEach(query.result.results,function(row,index){
+          angular.forEach(row,function(column,value){
+            var float = VdmUtil.SCToFloat(column);
+              if (float!=""){
+                query.result.results[index][value]=float;
+              }
+          });
+        });
         angular.forEach(query.result.results, function (data, index) {
           aggregatedData[data[dimension.index]] = (!!aggregatedData[data[dimension.index]] ? aggregatedData[data[dimension.index]] : 0)
           + parseFloat(data[metrics.index].replace(/[^\d\.\-]/g, ""));
diff --git a/webapp/app/js/utils/utils.js b/webapp/app/js/utils/utils.js
index d838e9e824..2e95342bd7 100644
--- a/webapp/app/js/utils/utils.js
+++ b/webapp/app/js/utils/utils.js
@@ -63,6 +63,26 @@ KylinApp.factory('VdmUtil', function ($modal, $timeout, $location, $anchorScroll
         if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? (o[k]) : (("00" + o[k]).substr(("" + o[k]).length)));
 
       return fmt;
+    },
+
+    SCToFloat:function(data){
+      var resultValue = "";
+      if (data&&data.indexOf('E') != -1){
+        var regExp = new RegExp('^((\\d+.?\\d+)[Ee]{1}(\\d+))$', 'ig');
+        var result = regExp.exec(data);
+        var power = "";
+        if (result != null){
+          resultValue = result[2];
+          power = result[3];
+        }
+        if (resultValue != ""){
+          if (power != ""){
+            var powVer = Math.pow(10, power);
+            resultValue = (resultValue * powVer).toFixed(2);
+          }
+        }
+      }
+      return resultValue;
     }
   }
 });
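
Illustrative calls to the SCToFloat helper above (inputs chosen for clarity; note that the outer guard only fires on an upper-case 'E', and the regex handles neither signs nor negative exponents):

    VdmUtil.SCToFloat('1.2E3'); // -> '1200.00' (1.2 x 10^3, fixed to two decimals)
    VdmUtil.SCToFloat('42');    // -> '' (no exponent; graph.js then keeps the raw cell)
    VdmUtil.SCToFloat('1.5e2'); // -> '' (lower-case 'e' never enters the branch)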
diff --git a/webapp/app/partials/cubeDesigner/advanced_settings.html b/webapp/app/partials/cubeDesigner/advanced_settings.html
index 34fd49232a..2728240860 100755
--- a/webapp/app/partials/cubeDesigner/advanced_settings.html
+++ b/webapp/app/partials/cubeDesigner/advanced_settings.html
@@ -53,7 +53,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
 
                       <ui-select
                         autofocus="true"
-                        close-on-select="false"
+                        close-on-select="false"    ng-disabled="instance.status=='READY'"
                         on-select="refreshAggregationGroup(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group)"
                         on-remove="refreshAggregationGroup(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group)"
                         ng-if="state.mode=='edit'" style="width: 100%" multiple ng-model="aggregation_group.includes">
@@ -77,7 +77,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                     <td class="col-xs-8">
                       <ui-select
                         close-on-select="false"
-                        autofocus="true"
+                        autofocus="true"       ng-disabled="instance.status=='READY'"
                         on-select="refreshAggregationGroup(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group)"
                         on-remove="refreshAggregationGroup(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group)"
                         ng-if="state.mode=='edit'" style="width: 100%" multiple ng-model="aggregation_group.select_rule.mandatory_dims">
@@ -100,11 +100,11 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                     </td>
                     <td class="col-xs-9">
                       <table class="table">
-                        <tr class="row" ng-repeat="hierarchyDims in aggregation_group.select_rule.hierarchy_dims track by $index">
+                        <tr class="row" ng-repeat="hierarchyDims in aggregation_group.select_rule.hierarchy_dims">
                           <td class="col-xs-10">
                             <ui-select
                               close-on-select="false"
-                              autofocus="true"
+                              autofocus="true"      ng-disabled="instance.status=='READY'"
                               on-select="refreshAggregationHierarchy(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group,$index,hierarchyDims)"
                               on-remove="refreshAggregationHierarchy(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group,$index,hierarchyDims)"
                               ng-if="state.mode=='edit'" style="width: 100%" multiple ng-model="hierarchyDims">
@@ -118,7 +118,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                               {{hierarchyDims}}</p>
                           </td>
                           <td class="col-xs-2">
-                            <button class="btn btn-sm btn-info"
+                            <button class="btn btn-sm btn-info"  ng-disabled="instance.status=='READY'"
                                     ng-click="removeHierarchy(aggregation_group,hierarchyDims)"
                                     ng-show="state.mode=='edit'"><i class="fa fa-minus"></i>
                             </button>
@@ -126,7 +126,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                         </tr>
                         <tr class="row">
                           <td class="col-xs-12">
-                            <button class="btn btn-sm btn-info"
+                            <button class="btn btn-sm btn-info"     ng-disabled="instance.status=='READY'"
                                     ng-click="addNewHierarchy(aggregation_group)" ng-show="state.mode=='edit'">New Hierarchy<i class="fa fa-plus"></i>
                             </button>
                           </td>
@@ -140,12 +140,12 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                     </td>
                     <td class="col-xs-9">
                       <table class="table">
-                        <tr class="row" ng-repeat="jointDims in aggregation_group.select_rule.joint_dims track by $index">
+                        <tr class="row" ng-repeat="jointDims in aggregation_group.select_rule.joint_dims">
                           <td class="col-xs-10">
                             <!-- Dimensions -->
                             <ui-select
                               close-on-select="false"
-                              autofocus="true"
+                              autofocus="true"    ng-disabled="instance.status=='READY'"
                               on-select="refreshAggregationJoint(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group,$index,jointDims)"
                               on-remove="refreshAggregationJoint(cubeMetaFrame.aggregation_groups, rowIndex, aggregation_group,$index,jointDims)"
                               ng-if="state.mode=='edit'" style="width: 100%" multiple ng-model="jointDims">
@@ -159,7 +159,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                               {{jointDims}}</p>
                           </td>
                           <td class="col-xs-2">
-                            <button class="btn btn-sm btn-info"
+                            <button class="btn btn-sm btn-info"   ng-disabled="instance.status=='READY'"
                                     ng-click="removeJointDims(aggregation_group,jointDims)"
                                     ng-show="state.mode=='edit'"><i class="fa fa-minus"></i>
                             </button>
@@ -167,7 +167,7 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                         </tr>
                         <tr class="row">
                           <td class="col-xs-12">
-                            <button class="btn btn-sm btn-info"
+                            <button class="btn btn-sm btn-info"      ng-disabled="instance.status=='READY'"
                                     ng-click="addNewJoint(aggregation_group)" ng-show="state.mode=='edit'">New Joint<i class="fa fa-plus"></i>
                             </button>
                           </td>
@@ -178,14 +178,14 @@ <h3 style="margin-left:42px;">Aggregation Groups </h3>
                 </table>
               </td>
               <td ng-if="state.mode=='edit'" class="col-xs-1">
-                <button class="btn btn-xs btn-info"
+                <button class="btn btn-xs btn-info"   ng-disabled="instance.status=='READY'"
                         ng-click="removeElement(cubeMetaFrame.aggregation_groups, aggregation_group)"><i
                   class="fa fa-minus"></i>
                 </button>
               </td>
             </tr>
           </table>
-          <button class="btn btn-sm btn-info" style="margin-left:42px" ng-click="addNewAggregationGroup()"
+          <button class="btn btn-sm btn-info" style="margin-left:42px" ng-click="addNewAggregationGroup()"  ng-disabled="instance.status=='READY'"
                   ng-show="state.mode=='edit'">New Aggregation Group<i class="fa fa-plus"></i>
           </button>
         </div>
@@ -223,7 +223,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                     <td>
                       <!--Column Name -->
                       <input type="text" class="form-control" placeholder="Column Name.." ng-if="state.mode=='edit'"
-                             ng-change="refreshRowKey(convertedRowkeys,$index,rowkey_column)"
+                             ng-change="refreshRowKey(convertedRowkeys,$index,rowkey_column)"  ng-disabled="instance.status=='READY'"
                              tooltip="rowkey column name.." tooltip-trigger="focus"
                              ng-model="rowkey_column.column" class="form-control">
 
@@ -231,7 +231,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                     </td>
 
                     <td>
-                      <select ng-if="state.mode=='edit'" style="width:180px;"
+                      <select ng-if="state.mode=='edit'" style="width:180px;"     ng-disabled="instance.status=='READY'"
                               chosen ng-model="rowkey_column.encoding"
                               ng-change="refreshRowKey(convertedRowkeys,$index,rowkey_column);"
                               ng-options="dt.value as dt.name for dt in getEncodings(rowkey_column.column)">
@@ -244,7 +244,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                       <!--Column Length -->
                       <input type="text" class="form-control" placeholder="Column Length.." ng-if="state.mode=='edit'"
                              tooltip="rowkey column length.." tooltip-trigger="focus"
-                             ng-disabled="rowkey_column.encoding=='dict'||rowkey_column.encoding=='date'||rowkey_column.encoding=='time'||rowkey_column.encoding=='boolean'"
+                             ng-disabled="rowkey_column.encoding=='dict'||rowkey_column.encoding=='date'||rowkey_column.encoding=='time'||rowkey_column.encoding=='boolean'||instance.status=='READY'"
                              ng-change="refreshRowKey(convertedRowkeys,$index,rowkey_column);"
                              ng-model="rowkey_column.valueLength" class="form-control">
 
@@ -255,7 +255,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                     <td>
                       <select ng-if="state.mode=='edit'" style="width:180px;"
                               chosen ng-model="rowkey_column.isShardBy"
-                              tooltip="false by default"
+                              tooltip="false by default"     ng-disabled="instance.status=='READY'"
                               data-placeholder="false by default"
                               ng-change="refreshRowKey(convertedRowkeys,$index,rowkey_column,true)"
                               ng-options="dt as dt for dt in cubeConfig.rowKeyShardOptions">
@@ -266,7 +266,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                     </td>
 
                     <td ng-if="state.mode=='edit'">
-                      <button class="btn btn-xs btn-info"
+                      <button class="btn btn-xs btn-info"  ng-disabled="instance.status=='READY'"
                               ng-click="removeRowkey(convertedRowkeys, $index,rowkey_column)"><i
                         class="fa fa-minus"></i>
                       </button>
@@ -276,7 +276,7 @@ <h3 style="margin-left:42px">Rowkeys  <i kylinpopover placement="right" title="R
                 </table>
               </div>
           </div>
-          <button class="btn btn-sm btn-info" style="margin-left:42px"
+          <button class="btn btn-sm btn-info" style="margin-left:42px" ng-disabled="instance.status=='READY'"
                   ng-click="addNewRowkeyColumn()" ng-show="state.mode=='edit'">New Rowkey Column<i class="fa fa-plus"></i>
           </button>
         </div>
@@ -310,11 +310,11 @@ <h3 style="margin-left:42px">Advanced Dictionaries  <i kylinpopover placement="r
                     </td>
                     <td ng-if="state.mode=='edit'">
                       <!--Edit Button -->
-                      <button class="btn btn-xs btn-info" ng-click="addNewDictionaries(dictionaries, $index)" >
+                      <button class="btn btn-xs btn-info" ng-click="addNewDictionaries(dictionaries, $index)" ng-disabled="instance.status=='READY'">
                         <i class="fa fa-pencil"></i>
                       </button>
                       <!--Remove Button -->
-                      <button class="btn btn-xs  btn-danger" ng-click="removeDictionaries(cubeMetaFrame.dictionaries, dictionaries)">
+                      <button class="btn btn-xs  btn-danger" ng-click="removeDictionaries(cubeMetaFrame.dictionaries, dictionaries)" ng-disabled="instance.status=='READY'">
                         <i class="fa fa-trash-o"></i>
                       </button>
                     </td>
@@ -326,7 +326,7 @@ <h3 style="margin-left:42px">Advanced Dictionaries  <i kylinpopover placement="r
         </div>
         <!--Add Dictionaries Button-->
         <div class="form-group" style="margin-left:42px;">
-          <button class="btn btn-sm btn-info" ng-click="addNewDictionaries()" ng-show="state.mode=='edit' && !addNew">
+          <button class="btn btn-sm btn-info" ng-click="addNewDictionaries()" ng-show="state.mode=='edit' && !addNew" ng-disabled="instance.status=='READY'">
             <i class="fa fa-plus"></i> Dictionaries
           </button>
        </div>
@@ -368,7 +368,12 @@ <h4 class="box-title text-info">Edit Dictionaries</h4>
                    <div class="row">
                      <label class="col-xs-12 col-sm-3 control-label no-padding-right font-color-default"><b>Builder Class</b></label>
                      <div class="col-xs-12 col-sm-6">
-                       <input type="text" name="builder"  ng-model="newDictionaries.builder" required  style="width:100% " />
+                       <select class="form-control" chosen
+                               ng-model="newDictionaries.builder"
+                               ng-options="item.value as item.name for item in cubeConfig.buildDictionaries"
+                               required>
+                         <option value="">-- Select a builder class--</option>
+                       </select>
                      </div>
                    </div>
                  </div>
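
A note on the recurring ng-disabled="instance.status=='READY'" guard added throughout this template: once a cube has been built and reaches READY state its settings must stay fixed, so the edit controls are greyed out rather than hidden. A minimal controller-side sketch of the same guard, assuming a cube instance object like the one these pages bind to (the names and bodies below are illustrative, not the PR's controller code):

    angular.module('readyGuardSketch', [])
      .controller('CubeEditCtrl', ['$scope', function ($scope) {
        // Hypothetical stand-in for the cube instance the designer binds;
        // the real object is loaded from the Kylin REST API.
        $scope.instance = { status: 'READY' };

        // Controller-side equivalent of ng-disabled="instance.status=='READY'":
        // when true, the widget is disabled while the page can still be
        // opened in edit mode.
        $scope.isLocked = function () {
          return $scope.instance.status === 'READY';
        };
      }]);
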
diff --git a/webapp/app/partials/cubeDesigner/dimensions.html b/webapp/app/partials/cubeDesigner/dimensions.html
index 3a2efb6592..ed265c4deb 100644
--- a/webapp/app/partials/cubeDesigner/dimensions.html
+++ b/webapp/app/partials/cubeDesigner/dimensions.html
@@ -23,7 +23,7 @@
         <div class="row">
             <div class="col-xs-6" ng-if="state.mode=='edit'">
                 <div class="btn-group">
-                    <button type="button" class="btn btn-primary dropdown-toggle" data-toggle="dropdown">
+                    <button type="button" class="btn btn-primary dropdown-toggle" data-toggle="dropdown" ng-disabled="instance.status=='READY'">
                         <i class="fa fa-plus"></i> Add Dimension <span class="ace-icon fa fa-caret-down icon-on-right"></span>
                     </button>
                     <ul class="dropdown-menu" role="menu">
@@ -32,8 +32,8 @@
                         <li><a ng-click="addDim('derived')">Derived</a></li>
                     </ul>
                 </div>
-                <button type="button" class="btn btn-default" ng-disabled="!metaModel.model.fact_table.length"
-                        ng-click="openAutoGenModal()">
+                <button type="button" class="btn btn-default" ng-disabled="!metaModel.model.fact_table.length||instance.status=='READY'"
+                        ng-click="openAutoGenModal()" >
                     <i class="fa fa-building-o"></i> Auto Generator
                 </button>
             </div>
@@ -94,11 +94,11 @@
                     </td>
                     <td ng-if="state.mode=='edit'">
                         <!--edit button-->
-                        <button class="btn btn-xs btn-info"
+                        <button class="btn btn-xs btn-info" ng-disabled="instance.status=='READY'"
                                 ng-click="editDim(dimension)"><i class="fa fa-pencil"></i>
                         </button>
                         <!-- remove button-->
-                        <button class="btn btn-xs btn-danger"
+                        <button class="btn btn-xs btn-danger" ng-disabled="instance.status=='READY'"
                                 ng-click="removeDim(dimension)"><i class="fa fa-trash-o"></i>
                         </button>
                     </td>
@@ -224,30 +224,31 @@ <h4 class="box-title lighter">{{dimState.editing ? 'Edit' : 'Add'}} Dimension
 
     <script type="text/ng-template" id="autoGenDimension.html">
         <div class="modal-header large-popover">
-            <h4 class="box-title lighter">Auto Generate Dimensions <small>This is a helper for you to batch generate dimensions.</small><i kylinpopover placement="right" title="Auto Generate Rules" template="AutoDimensionsTip.html" class="fa fa-info-circle"></i></h4>
+            <h4 class="box-title lighter">Auto Generate Dimensions <i kylinpopover placement="right" title="Auto Generate Rules" template="AutoDimensionsTip.html" class="fa fa-info-circle"></i> <small>This is a helper for you to batch generate dimensions.</small></h4>
+            <div class="col-xs-12">
+                Visit <a href="http://kylin.apache.org/docs/howto/howto_optimize_cubes.html" target="_blank">derived column</a> for more about derived column.
+            </div>
         </div>
         <div class="modal-body">
             <div class="row">
                 <div class="col-xs-12">
                     <div class="box box-solid">
-                        <div class="box-header"><h4>Columns</h4></div>
                         <div class="box-body">
                           <ul class="list-unstyled columns-region">
-                            <accordion close-others=false>
                               <!--FactTable-->
-                              <div ng-repeat="table in availableTables track by $index" ng-if="$index == 0" accordion-group class="panel-default " heading="{{table}}{{dataKylin.cube.cubeDSFactTable}}" is-open=true>
-                                <table class="table table-striped table-hover ng-scope">
+                              <div ng-repeat="table in availableTables track by $index" ng-if="$index == 0"  class="panel-default " >
+                                <h4>{{table}}[FactTable]</h4>
+                                <table class="table table-striped table-hover ng-scope"    >
                                   <tr >
-                                    <td class="col-xs-1"></td>
+                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">Select All</label></td>
                                     <td class="col-xs-4"><label>Name</label></td>
                                     <td class="col-xs-3"><label>Columns</label></td>
-                                    <td colspan="2" class="col-xs-4"></td>
+                                    <td colspan="2" class="col-xs-3"></td>
                                   </tr>
                                   <tr ng-repeat="col in availableColumns[table] track by col.table + '.' + col.name" >
                                     <td >
                                       <label class="dim-checkbox-label">
-                                      <input type="checkbox" ng-model="selectedColumns[table][col.name].selected"
-                                             ng-disabled="selectedColumns[table][col.name].disabled"  >
+                                      <input type="checkbox" ng-model="selectedColumns[table][col.name].selected" ng-change="autoChange(table,col.name)">
                                       </label>
                                     </td>
                                     <td >
@@ -256,27 +257,28 @@ <h4 class="box-title lighter">Auto Generate Dimensions <small>This is a helper f
                                     <td>
                                       {{col.name}}
                                     </td>
+                                    <td colspan="2"></td>
                                   </tr>
                                 </table>
                               </div>
                               <!--LookUp Table-->
-                              <div ng-repeat="table in availableTables track by $index" ng-if="$index > 0" accordion-group class="panel-default" heading="{{table}}{{dataKylin.cube.cubeDSLookupTable}}">
+                              <div ng-repeat="table in availableTables track by $index" ng-if="$index > 0"  class="panel-default" >
+                                <h4>{{table}}[LookupTable]</h4>
                                 <table class="table table-striped table-hover ng-scope">
                                   <tr class="row" >
-                                    <td class="col-xs-1"></td>
+                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">Select All</label></td>
                                     <td class="col-xs-4"><label>Name</label></td>
                                     <td class="col-xs-3"><label>Columns</label></td>
-                                    <td colspan="2" class="col-xs-4"></td>
+                                    <td colspan="2" class="col-xs-3"></td>
                                   </tr>
                                   <tr ng-repeat="col in availableColumns[table] track by col.table + '.' + col.name" class="row">
                                     <td class=>
                                       <label class="dim-checkbox-label">
-                                      <input type="checkbox" ng-model="selectedColumns[table][col.name].selected"
-                                             ng-disabled="selectedColumns[table][col.name].disabled"  ng-change="autoChange(table,col.name)">
+                                      <input type="checkbox" ng-model="selectedColumns[table][col.name].selected" ng-change="autoChange(table,col.name)">
                                       </label>
                                     </td>
                                     <td >
-                                      <input type="text" ng-model="selectedColumns[table][col.name].name" placeholder={{table}}_derived  ng-disabled="!selectedColumns[table][col.name].selected" style="width:90%;">
+                                      <input type="text" ng-model="selectedColumns[table][col.name].name"  ng-disabled="!selectedColumns[table][col.name].selected" style="width:90%;">
                                     </td>
                                     <td >
                                       {{col.name}}
@@ -290,28 +292,15 @@ <h4 class="box-title lighter">Auto Generate Dimensions <small>This is a helper f
                                   </tr>
                                 </table>
                               </div>
-                            </accordion>
                           </ul>
                         </div>
                     </div>
                 </div>
-
-                <!--Tips-->
-                <div class="col-xs-4">
-                    <div class="box box-solid">
-                        <div class="box-header">
-                            <h4 class="box-title"></h4>
-                        </div>
-                        <div class="box-body">
-
-                        </div>
-                    </div>
-                </div>
             </div>
         </div>
         <div class="modal-footer">
-            <button class="btn btn-primary" ng-disabled="" ng-click="checkAutoDimension()?ok():''">OK</button>
             <button class="btn btn-warning" ng-click="cancel()">Cancel</button>
+            <button class="btn btn-primary" ng-disabled="" ng-click="checkAutoDimension()?ok():''">OK</button>
         </div>
     </script>
     </ng-form>
@@ -320,9 +309,8 @@ <h4 class="box-title"></h4>
   <div class="row">
     <div class="col-xs-12">
       <ol class="text-info">
-        <li>Column is disabled once there is already a dimension referred this column.</li>
         <li>In fact table: one normal dimension will be auto generated per column.</li>
-        <li>In lookup table: one derived dimension will be auto generated for all the selected columns.</li>
+        <li>In lookup table: you can choose to generate a derived column or a normal column.</li>
       </ol>
     </div>
   </div>
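
The new "Select All" checkboxes above call autoChangeAll(table), and the per-column checkboxes call autoChange(table, col.name). A sketch of what such handlers could look like, assuming the selectedColumns/availableColumns shapes used in this template (the function names come from the template; the bodies and sample data are assumptions):

    angular.module('autoGenSketch', [])
      .controller('AutoGenCtrl', ['$scope', function ($scope) {
        // Sample data in the shape the template binds to.
        $scope.availableColumns = { FACT: [{ name: 'COL_A' }, { name: 'COL_B' }] };
        $scope.selectedColumns  = { FACT: { all: false, COL_A: {}, COL_B: {} } };

        // Ticking a table's "Select All" box toggles every column in it.
        $scope.autoChangeAll = function (table) {
          var all = $scope.selectedColumns[table].all;
          angular.forEach($scope.availableColumns[table], function (col) {
            $scope.selectedColumns[table][col.name].selected = all;
          });
        };

        // Un-ticking any single column clears the table-level flag.
        $scope.autoChange = function (table, columnName) {
          if (!$scope.selectedColumns[table][columnName].selected) {
            $scope.selectedColumns[table].all = false;
          }
        };
      }]);
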
diff --git a/webapp/app/partials/cubeDesigner/info.html b/webapp/app/partials/cubeDesigner/info.html
index 755fc1257c..b1e4d483de 100644
--- a/webapp/app/partials/cubeDesigner/info.html
+++ b/webapp/app/partials/cubeDesigner/info.html
@@ -112,7 +112,7 @@
                         <b>Description</b>
                     </label>
                     <div class="col-xs-12 col-sm-6">
-                        <textarea ng-if="state.mode=='edit'" class="form-control box-default"
+                        <textarea ng-if="state.mode=='edit'" class="form-control box-default"  ng-disabled="instance.status=='READY'"
                             name="comment" id="comment" ng-model="cubeMetaFrame.description"></textarea>
                         <span ng-if="state.mode=='view'">{{cubeMetaFrame.description}}</span>
                     </div>
diff --git a/webapp/app/partials/cubeDesigner/measures.html b/webapp/app/partials/cubeDesigner/measures.html
index b4513f291a..8961cc6dda 100755
--- a/webapp/app/partials/cubeDesigner/measures.html
+++ b/webapp/app/partials/cubeDesigner/measures.html
@@ -56,11 +56,11 @@
                           </td>
                           <td ng-if="state.mode=='edit'">
                               <!--Edit Button -->
-                              <button class="btn btn-xs btn-info" ng-click="addNewMeasure(measure, $index)">
+                              <button class="btn btn-xs btn-info" ng-click="addNewMeasure(measure, $index)" ng-disabled="instance.status=='READY'">
                                   <i class="fa fa-pencil"></i>
                               </button>
                               <!--Remove Button -->
-                              <button class="btn btn-xs  btn-danger" ng-click="removeElement(cubeMetaFrame.measures, measure)">
+                              <button class="btn btn-xs  btn-danger" ng-click="removeElement(cubeMetaFrame.measures, measure)" ng-disabled="instance.status=='READY'">
                                   <i class="fa fa-trash-o"></i>
                               </button>
                           </td>
@@ -72,7 +72,7 @@
 
   <!--Add Measures Button-->
   <div class="form-group">
-      <button class="btn btn-sm btn-info" ng-click="addNewMeasure()" ng-show="state.mode=='edit' && !newMeasure">
+      <button class="btn btn-sm btn-info" ng-click="addNewMeasure()" ng-show="state.mode=='edit' && !newMeasure" ng-disabled="instance.status=='READY'">
           <i class="fa fa-plus"></i> Measure
       </button>
   </div>
@@ -149,15 +149,22 @@ <h4 class="box-title text-info">Edit Measure</h4>
                                       ng-init="newMeasure.function.parameter.value = 1"><b>&nbsp;&nbsp;1</b></span>
                                 <!--!COUNT_DISTINCT-->
                                 <select class="form-control" chosen
-                                        ng-if="newMeasure.function.parameter.type == 'column' && newMeasure.function.expression !== 'EXTENDED_COLUMN'"
-                                        ng-model="newMeasure.function.parameter.value"
+                                        ng-if="newMeasure.function.parameter.type == 'column' && (newMeasure.function.expression == 'COUNT_DISTINCT'||newMeasure.function.expression == 'RAW')"
+                                        ng-model="newMeasure.function.parameter.value" required
                                         ng-change="measureReturnTypeUpdate();"
-                                        ng-options="column as column for column in getCommonMetricColumns()" >
+                                        ng-options="column as column for column in getAllModelDimMeasureColumns(newMeasure)" >
+                                  <option value="">-- Select a Column --</option>
+                                </select>
+                                <select class="form-control" chosen
+                                        ng-if="newMeasure.function.parameter.type == 'column' && newMeasure.function.expression !== 'EXTENDED_COLUMN' && newMeasure.function.expression !== 'COUNT_DISTINCT' && newMeasure.function.expression !== 'RAW'"
+                                        ng-model="newMeasure.function.parameter.value" required
+                                        ng-change="measureReturnTypeUpdate();"
+                                        ng-options="column as column for column in getCommonMetricColumns(newMeasure)" >
                                   <option value="">-- Select a Column --</option>
                                 </select>
                                 <select class="form-control" chosen
                                         ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'"
-                                        ng-model="newMeasure.function.parameter.value"
+                                        ng-model="newMeasure.function.parameter.value" required
                                         ng-change="measureReturnTypeUpdate();"
                                         ng-options="column as column for column in getExtendedHostColumn()" >
                                   <option value="">-- Select a Column --</option>
@@ -165,44 +172,6 @@ <h4 class="box-title text-info">Edit Measure</h4>
                               </div>
                           </div>
                       </div>
-                      <!--Return Type-->
-                      <div class="form-group middle-popover">
-                          <div class="row">
-                              <label class="col-xs-12 col-sm-3 control-label no-padding-right font-color-default">
-                                <b ng-if="newMeasure.function.expression !== 'EXTENDED_COLUMN'">Return Type</b>
-                                <b ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'">Maximum length of extended column</b>  <i ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'" title="Maximum Length" class="fa fa-info-circle" kylinpopover placement="right" template="extendedTypeTip.html"></i>
-                              </label>
-                              <div class="col-xs-12 col-sm-6">
-                                  <select class="form-control"
-                                      ng-if="newMeasure.function.expression == 'COUNT_DISTINCT'"
-                                      ng-init="newMeasure.function.returntype = (!!newMeasure.function.returntype)?newMeasure.function.returntype:cubeConfig.dftSelections.distinctDataType.value"
-                                      chosen ng-model="newMeasure.function.returntype" required
-                                      ng-options="ddt.value as ddt.name for ddt in cubeConfig.distinctDataTypes">
-                                      <option value=""></option>
-                                  </select>
-                                <select class="form-control"
-                                        ng-if="newMeasure.function.expression == 'TOP_N'"
-                                        ng-init="newMeasure.function.returntype = (!!newMeasure.function.returntype)?newMeasure.function.returntype:cubeConfig.dftSelections.topN.value"
-                                        chosen ng-model="newMeasure.function.returntype" required
-                                        ng-options="ddt.value as ddt.name for ddt in cubeConfig.topNTypes">
-                                  <option value=""></option>
-                                </select>
-
-                                <input extended-column-return
-                                  ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'"
-                                  type="text" placeholder="Kylin won't save more than this number of bytes" class="form-control"
-                                       tooltip-trigger="focus"
-                                       ng-init="newMeasure.function.returntype=newMeasure.function.returntype?newMeasure.function.returntype:'extendedcolumn(100)'"
-                                       ng-model="newMeasure.function.returntype" required />
-
-                                  <span class="font-color-default"
-                                        ng-if="newMeasure.function.expression != 'COUNT_DISTINCT' && newMeasure.function.expression != 'TOP_N' && newMeasure.function.expression != 'EXTENDED_COLUMN' "
-                                       ><b>&nbsp;&nbsp;{{newMeasure.function.returntype | uppercase}}</b>
-                                  </span>
-                              </div>
-                          </div>
-                      </div>
-
 
                       <div class="form-group middle-popover" ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'">
                         <div class="row">
@@ -212,12 +181,51 @@ <h4 class="box-title text-info">Edit Measure</h4>
                           <div class="col-xs-12 col-sm-6">
                             <select class="form-control" chosen ng-if="nextPara.type !== 'constant'" required
                                     ng-model="nextPara.value"
-                                    ng-options="column as column for column in getCommonMetricColumns()" >
+                                    ng-options="column as column for column in getAllModelDimColumns()" >
+                              <option value="">-- Select a Column --</option>
+                            </select>
+                          </div>
+                        </div>
+                      </div>
+
+                      <!--Return Type-->
+                      <div class="form-group middle-popover">
+                        <div class="row">
+                          <label class="col-xs-12 col-sm-3 control-label no-padding-right font-color-default">
+                            <b ng-if="newMeasure.function.expression !== 'EXTENDED_COLUMN'">Return Type</b>
+                            <b ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'">Maximum length of extended column</b>  <i ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'" title="Maximum Length" class="fa fa-info-circle" kylinpopover placement="right" template="extendedTypeTip.html"></i>
+                          </label>
+                          <div class="col-xs-12 col-sm-6">
+                            <select class="form-control"
+                                    ng-if="newMeasure.function.expression == 'COUNT_DISTINCT'"
+                                    ng-init="newMeasure.function.returntype = (!!newMeasure.function.returntype)?newMeasure.function.returntype:cubeConfig.dftSelections.distinctDataType.value"
+                                    chosen ng-model="newMeasure.function.returntype" required
+                                    ng-options="ddt.value as ddt.name for ddt in cubeConfig.distinctDataTypes">
                               <option value=""></option>
                             </select>
+                            <select class="form-control"
+                                    ng-if="newMeasure.function.expression == 'TOP_N'"
+                                    ng-init="newMeasure.function.returntype = (!!newMeasure.function.returntype)?newMeasure.function.returntype:cubeConfig.dftSelections.topN.value"
+                                    chosen ng-model="newMeasure.function.returntype" required
+                                    ng-options="ddt.value as ddt.name for ddt in cubeConfig.topNTypes">
+                              <option value=""></option>
+                            </select>
+
+                            <input extended-column-return
+                                   ng-if="newMeasure.function.expression == 'EXTENDED_COLUMN'"
+                                   type="text" placeholder="Kylin won't save more than this number of bytes" class="form-control"
+                                   tooltip-trigger="focus"
+                                   ng-init="newMeasure.function.returntype=newMeasure.function.returntype?newMeasure.function.returntype:'extendedcolumn(100)'"
+                                   ng-model="newMeasure.function.returntype" required />
+
+                                        <span class="font-color-default"
+                                              ng-if="newMeasure.function.expression != 'COUNT_DISTINCT' && newMeasure.function.expression != 'TOP_N' && newMeasure.function.expression != 'EXTENDED_COLUMN' "
+                                        ><b>&nbsp;&nbsp;{{newMeasure.function.returntype | uppercase}}</b>
+                                        </span>
                           </div>
                         </div>
                       </div>
+
                       <!--Group by Column-->
                       <div class="form-group" ng-if="newMeasure.function.expression == 'TOP_N'" >
                         <div class="row">
@@ -251,7 +259,7 @@ <h4 class="box-title text-info">Edit Measure</h4>
                                     <td>
                                       <select class="form-control" chosen ng-if="nextPara.type !== 'constant'" required
                                               ng-model="groupby_column.name"
-                                              ng-options="column as column for column in getGroupByColumns()" style="width:200px;">
+                                              ng-options="column as column for column in getAllModelDimColumns()" style="width:200px;">
                                         <option value="">--Select A Column--</option>
                                       </select>
                                     </td>
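
The column selector above is now split by aggregation type: COUNT_DISTINCT and RAW pick from getAllModelDimMeasureColumns(), other expressions from getCommonMetricColumns(), and EXTENDED_COLUMN plus the TOP_N group-by use getAllModelDimColumns(). A plausible reading, sketched under the assumption that distinct-count and raw measures may target dimension columns as well as metric columns (only the helper names come from the template; the bodies, the sample model shape, and the ignored newMeasure argument are assumptions):

    angular.module('measureColumnSketch', [])
      .controller('MeasureSketchCtrl', ['$scope', function ($scope) {
        // Assumed model shape; the real metaModel comes from the model designer.
        $scope.metaModel = { model: {
          metrics: ['PRICE', 'ITEM_COUNT'],
          dimensions: [{ columns: ['SELLER_ID', 'CAL_DT'] }]
        }};

        // Metric (measure) columns declared on the model.
        $scope.getCommonMetricColumns = function () {
          return $scope.metaModel.model.metrics || [];
        };

        // All dimension columns across the model.
        $scope.getAllModelDimColumns = function () {
          var dims = [];
          angular.forEach($scope.metaModel.model.dimensions, function (dim) {
            dims = dims.concat(dim.columns || []);
          });
          return dims;
        };

        // COUNT_DISTINCT and RAW may aggregate either kind of column.
        $scope.getAllModelDimMeasureColumns = function () {
          return $scope.getAllModelDimColumns()
            .concat($scope.getCommonMetricColumns());
        };
      }]);
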
diff --git a/webapp/app/partials/cubeDesigner/refresh_settings.html b/webapp/app/partials/cubeDesigner/refresh_settings.html
index 0d88e7c016..fc5f657c68 100755
--- a/webapp/app/partials/cubeDesigner/refresh_settings.html
+++ b/webapp/app/partials/cubeDesigner/refresh_settings.html
@@ -126,13 +126,15 @@
                     <!--<input type="text" class="form-control" datepicker-popup="yyyy-MM-dd" datepicker-timezone-->
                            <!--ng-model="cubeMetaFrame.partition_date_start" ng-if="state.mode=='edit'"-->
                            <!--placeholder="Click to choose start date..." is-open="opened"/>-->
-                    <div ng-if="state.mode=='edit'" class="dropdown">
+                    <div ng-if="state.mode=='edit'" class="dropdown" >
                       <a class="dropdown-toggle" id="dropdown2" role="button" data-toggle="dropdown" data-target="#" href="#">
-                        <div class="input-group"><input type="text" class="form-control" date-timepicker-timezone data-ng-model="cubeMetaFrame.partition_date_start"><span class="input-group-addon"><i class="glyphicon glyphicon-calendar"></i></span>
+                        <div class="input-group">
+                          <input type="text" class="form-control" date-timepicker-timezone data-ng-model="cubeMetaFrame.partition_date_start" ng-disabled="instance.status=='READY'">
+                          <span class="input-group-addon"><i class="glyphicon glyphicon-calendar"></i></span>
                         </div>
                       </a>
-                      <ul class="dropdown-menu" role="menu" aria-labelledby="dLabel">
-                        <datetimepicker  data-ng-model="cubeMetaFrame.partition_date_start" data-datetimepicker-config="{ dropdownSelector: '#dropdown2' }"/>
+                      <ul class="dropdown-menu" role="menu" aria-labelledby="dLabel" ng-if="!(instance.status=='READY')">
+                        <datetimepicker  data-ng-model="cubeMetaFrame.partition_date_start" data-datetimepicker-config="{ dropdownSelector: '#dropdown2' }" />
                       </ul>
                     </div>
 
diff --git a/webapp/app/partials/cubeDesigner/streamingConfig.html b/webapp/app/partials/cubeDesigner/streamingConfig.html
index 0148ac3802..8bdcd25d42 100644
--- a/webapp/app/partials/cubeDesigner/streamingConfig.html
+++ b/webapp/app/partials/cubeDesigner/streamingConfig.html
@@ -124,176 +124,151 @@ <h3 class="box-title">Cluster-{{$index+1}}</h3>
 
           </accordion-group>
         </accordion>
-
       <hr/>
-      <!--Advanced setting-->
       <accordion>
-        <accordion-group is-open="state.isStreamingAdOpen" ng-init="state.isStreamingAdOpen=true">
+
+        <accordion-group is-open="state.isParserHeaderOpen=true">
           <accordion-heading>
-            Advanced Setting
+            Parser Setting
             <i class="pull-right glyphicon"
-               ng-class="{'glyphicon-chevron-down': state.isStreamingAdOpen, 'glyphicon-chevron-right': !state.isStreamingAdOpen}"></i>
+               ng-class="{'glyphicon-chevron-down': state.isParserHeaderOpen, 'glyphicon-chevron-right': !state.isParserHeaderOpen}"></i>
           </accordion-heading>
 
           <div class="form-group middle-popover" ng-class="{'required':state.mode=='edit'}">
             <div class="row">
               <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Timeout</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Timeout" template="TimeoutTip.html"></i>
+                <b>Parser Name</b>
+                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Name" template="ParserName.html"></i>
               </label>
 
               <div class="col-xs-12 col-sm-6"
-                   ng-class="{'has-error':form.cube_streaming_form.timeout.$invalid && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)}">
-                <input ng-if="state.mode=='edit'" name="timeout" required ng-model="kafkaMeta.timeout" type="text"
-                       placeholder="Input kafkaConfig timeout"
-                       ng-pattern="/^\+?[1-9][0-9]*$/"
+                   ng-class="{'has-error':form.cube_streaming_form.parserName.$invalid && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)}">
+                <input ng-if="state.mode=='edit'" name="parserName" required ng-model="kafkaMeta.parserName" type="text"
+                       placeholder="Input kafkaConfig parserName"
                        class="form-control"/>
                 <small class="help-block"
-                       ng-show="!form.cube_streaming_form.timeout.$error.required && form.cube_streaming_form.timeout.$invalid && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka timeout is invalid.
+                       ng-show="!form.cube_streaming_form.parserName.$error.required && form.cube_streaming_form.parserName.$invalid && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka parser name is invalid.
                 </small>
                 <small class="help-block"
-                       ng-show="form.cube_streaming_form.timeout.$error.required && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka timeout is required.
+                       ng-show="form.cube_streaming_form.parserName.$error.required && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka parser name is required.
                 </small>
-                <span ng-if="state.mode=='view'">{{kafkaMeta.timeout}}</span>
+                <span ng-if="state.mode=='view'">{{kafkaMeta.parserName}}</span>
               </div>
             </div>
           </div>
-
-          <div class="form-group middle-popover" ng-class="{'required':state.mode=='edit'}">
+          <div class="form-group middle-popover" ng-if="state.mode=='edit'&&state.target!=='kfkConfig'" ng-class="{'required':state.mode=='edit'}">
             <div class="row">
               <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Buffer Size</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Buffer Size" template="BufferSizecTip.html"></i>
+                <b>Parser Timestamp Field</b>
+                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Timestamp Field" template="ParserTimestampFieldsTip.html"></i>
               </label>
 
-              <div class="col-xs-12 col-sm-6"
-                   ng-class="{'has-error':form.cube_streaming_form.bufferSize.$invalid && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)}">
-                <input ng-if="state.mode=='edit'" name="bufferSize" required ng-model="kafkaMeta.bufferSize" type="text"
-                       placeholder="Input kafkaConfig bufferSize"
-                       ng-pattern="/^\+?[1-9][0-9]*$/"
-                       class="form-control"/>
-                <small class="help-block"
-                       ng-show="!form.cube_streaming_form.bufferSize.$error.required && form.cube_streaming_form.bufferSize.$invalid && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka bufferSize is invalid.
-                </small>
-                <small class="help-block"
-                       ng-show="form.cube_streaming_form.bufferSize.$error.required && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka bufferSize is required.
-                </small>
-                <span ng-if="state.mode=='view'">{{kafkaMeta.bufferSize}}</span>
+              <div class="col-xs-12 col-sm-6 streamingParserCtr">
+                <select chosen ng-model="streamingCfg.parseTsColumn"
+                        ng-options="column as column for column in streamingCfg.columnOptions "
+                        ng-change="streamingTsColUpdate()"
+                        data-placeholder="select a column"
+                        class="chosen-select">
+                  <option value=""></option>
+                </select>
               </div>
             </div>
           </div>
-
           <div class="form-group middle-popover" ng-class="{'required':state.mode=='edit'}">
             <div class="row">
               <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Margin</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Margin" template="MarginTip.html"></i>
+                <b>Parser Properties</b>
+                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Properties" template="ParserPropertiesTip.html"></i>
               </label>
 
               <div class="col-xs-12 col-sm-6"
-                   ng-class="{'has-error':form.cube_streaming_form.margin.$invalid && (form.cube_streaming_form.margin.$dirty||form.cube_streaming_form.$submitted)}">
-                <input ng-if="state.mode=='edit'" name="margin" required ng-model="kafkaMeta.margin" type="text"
-                       placeholder="Input kafkaConfig margin"
-                       ng-pattern="/^\+?[1-9][0-9]*$/"
+                   ng-class="{'has-error':form.cube_streaming_form.parserProperties.$invalid && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)}">
+                <input ng-if="state.mode=='edit'" name="parserProperties" required ng-model="kafkaMeta.parserProperties" type="text"
+                       placeholder="configA=1;configB=2"
                        class="form-control"/>
                 <small class="help-block"
-                       ng-show="!form.cube_streaming_form.margin.$error.required && form.cube_streaming_form.margin.$invalid && (form.cube_streaming_form.margin.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka margin is invalid.
+                       ng-show="!form.cube_streaming_form.parserProperties.$error.required && form.cube_streaming_form.parserProperties.$invalid && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)">
+                  Parser properties is invalid.
                 </small>
                 <small class="help-block"
-                       ng-show="form.cube_streaming_form.margin.$error.required && (form.cube_streaming_form.margin.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka margin is required.
+                       ng-show="form.cube_streaming_form.parserProperties.$error.required && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)">
+                  Parser properties is required.
                 </small>
-                <span ng-if="state.mode=='view'">{{kafkaMeta.margin}}</span>
+                <span ng-if="state.mode=='view'">{{kafkaMeta.parserProperties}}</span>
               </div>
             </div>
           </div>
         </accordion-group>
       </accordion>
-
       <hr/>
+      <!--Advanced setting-->
       <accordion>
-
-        <accordion-group is-open="state.isParserHeaderOpen">
+        <accordion-group is-open="state.isStreamingAdOpen" ng-init="state.isStreamingAdOpen">
           <accordion-heading>
-            Parser Setting
+            Advanced Setting
             <i class="pull-right glyphicon"
-               ng-class="{'glyphicon-chevron-down': state.isParserHeaderOpen, 'glyphicon-chevron-right': !state.isParserHeaderOpen}"></i>
+               ng-class="{'glyphicon-chevron-down': state.isStreamingAdOpen, 'glyphicon-chevron-right': !state.isStreamingAdOpen}"></i>
           </accordion-heading>
 
           <div class="form-group middle-popover" ng-class="{'required':state.mode=='edit'}">
             <div class="row">
               <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Parser Name</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Name" template="ParserName.html"></i>
+                <b>Timeout</b>
+                <i class="fa fa-info-circle" kylinpopover placement="right" title="Timeout" template="TimeoutTip.html"></i>
               </label>
 
               <div class="col-xs-12 col-sm-6"
-                   ng-class="{'has-error':form.cube_streaming_form.parserName.$invalid && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)}">
-                <input ng-if="state.mode=='edit'" name="parserName" required ng-model="kafkaMeta.parserName" type="text"
-                       placeholder="Input kafkaConfig parserName"
+                   ng-class="{'has-error':form.cube_streaming_form.timeout.$invalid && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)}">
+                <input ng-if="state.mode=='edit'" name="timeout" required ng-model="kafkaMeta.timeout" type="text"
+                       placeholder="Input kafkaConfig timeout"
+                       ng-pattern="/^\+?[1-9][0-9]*$/"
                        class="form-control"/>
                 <small class="help-block"
-                       ng-show="!form.cube_streaming_form.parserName.$error.required && form.cube_streaming_form.parserName.$invalid && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka parser name is invalid.
+                       ng-show="!form.cube_streaming_form.timeout.$error.required && form.cube_streaming_form.timeout.$invalid && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka timeout is invalid.
                 </small>
                 <small class="help-block"
-                       ng-show="form.cube_streaming_form.parserName.$error.required && (form.cube_streaming_form.parserName.$dirty||form.cube_streaming_form.$submitted)">
-                  Kafka parser name is required.
+                       ng-show="form.cube_streaming_form.timeout.$error.required && (form.cube_streaming_form.timeout.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka timeout is required.
                 </small>
-                <span ng-if="state.mode=='view'">{{kafkaMeta.parserName}}</span>
+                <span ng-if="state.mode=='view'">{{kafkaMeta.timeout}}</span>
               </div>
             </div>
           </div>
-          <div class="form-group middle-popover" ng-if="state.mode=='edit'&&state.target!=='kfkConfig'" ng-class="{'required':state.mode=='edit'}">
-            <div class="row">
-              <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Parser Timestamp Field</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Timestamp Field" template="ParserTimestampFieldsTip.html"></i>
-              </label>
 
-              <div class="col-xs-12 col-sm-6 streamingParserCtr">
-                  <select chosen ng-model="streamingCfg.parseTsColumn"
-                           ng-options="column as column for column in streamingCfg.columnOptions "
-                           ng-change="streamingTsColUpdate()"
-                           data-placeholder="select a column"
-                           class="chosen-select">
-                    <option value=""></option>
-                  </select>
-              </div>
-            </div>
-          </div>
           <div class="form-group middle-popover" ng-class="{'required':state.mode=='edit'}">
             <div class="row">
               <label class="col-xs-12 col-sm-3 control-label no-padding-right">
-                <b>Parser Properties</b>
-                <i class="fa fa-info-circle" kylinpopover placement="right" title="Parser Properties" template="ParserPropertiesTip.html"></i>
+                <b>Buffer Size</b>
+                <i class="fa fa-info-circle" kylinpopover placement="right" title="Buffer Size" template="BufferSizecTip.html"></i>
               </label>
 
               <div class="col-xs-12 col-sm-6"
-                   ng-class="{'has-error':form.cube_streaming_form.parserProperties.$invalid && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)}">
-                <input ng-if="state.mode=='edit'" name="parserProperties" required ng-model="kafkaMeta.parserProperties" type="text"
-                       placeholder="configA=1;configB=2"
+                   ng-class="{'has-error':form.cube_streaming_form.bufferSize.$invalid && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)}">
+                <input ng-if="state.mode=='edit'" name="bufferSize" required ng-model="kafkaMeta.bufferSize" type="text"
+                       placeholder="Input kafkaConfig bufferSize"
+                       ng-pattern="/^\+?[1-9][0-9]*$/"
                        class="form-control"/>
                 <small class="help-block"
-                       ng-show="!form.cube_streaming_form.parserProperties.$error.required && form.cube_streaming_form.parserProperties.$invalid && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)">
-                  Parser properties is invalid.
+                       ng-show="!form.cube_streaming_form.bufferSize.$error.required && form.cube_streaming_form.bufferSize.$invalid && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka bufferSize is invalid.
                 </small>
                 <small class="help-block"
-                       ng-show="form.cube_streaming_form.parserProperties.$error.required && (form.cube_streaming_form.parserProperties.$dirty||form.cube_streaming_form.$submitted)">
-                  Parser properties is required.
+                       ng-show="form.cube_streaming_form.bufferSize.$error.required && (form.cube_streaming_form.bufferSize.$dirty||form.cube_streaming_form.$submitted)">
+                  Kafka bufferSize is required.
                 </small>
-                <span ng-if="state.mode=='view'">{{kafkaMeta.parserProperties}}</span>
+                <span ng-if="state.mode=='view'">{{kafkaMeta.bufferSize}}</span>
               </div>
             </div>
           </div>
+
         </accordion-group>
       </accordion>
 
+
+
     </div>
   </form>
 </div>
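
The Parser Properties field above documents its expected format only through the placeholder configA=1;configB=2, i.e. semicolon-separated key=value pairs. A small illustrative parser for that convention (the actual parsing happens server-side in the configured stream parser, so this is a sketch of the format, not Kylin code):

    // Parse "configA=1;configB=2" into a plain object.
    function parseParserProperties(raw) {
      var props = {};
      String(raw || '').split(';').forEach(function (pair) {
        var idx = pair.indexOf('=');
        if (idx > 0) {
          props[pair.slice(0, idx).trim()] = pair.slice(idx + 1).trim();
        }
      });
      return props;
    }

    // parseParserProperties('configA=1;configB=2')
    //   => { configA: '1', configB: '2' }
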
diff --git a/webapp/app/partials/cubes/cube_detail.html b/webapp/app/partials/cubes/cube_detail.html
index 909e05e5c7..1896b424a7 100755
--- a/webapp/app/partials/cubes/cube_detail.html
+++ b/webapp/app/partials/cubes/cube_detail.html
@@ -38,7 +38,7 @@
         </li>
         <li class="{{cube.visiblePage=='hbase'? 'active':''}}"
             ng-if="userService.hasRole('ROLE_ADMIN')">
-            <a href="" ng-click="cube.visiblePage='hbase';getHbaseInfo(cube)">HBase</a>
+            <a href="" ng-click="cube.visiblePage='hbase';getHbaseInfo(cube)">Storage</a>
         </li>
     </ul>
 
@@ -95,27 +95,24 @@ <h5>Notification List(Comma Separated)</h5>
 
   <div class="cube-detail" ng-show="cube.visiblePage=='hbase'">
         <div style="margin: 15px;">
+            <div ng-if="cube.hbase">
+              <div class="hr hr8 hr-double hr-dotted"></div>
+              <h5><b>Segment Number:</b> <span class="red">{{cube.hbase.length}}</span> <b>Total Size:</b> <span class="red">{{cube.totalSize | bytes}}</span></h5>
+            </div>
             <div ng-repeat="table in cube.hbase">
-                <h5><b>HTable:</b> {{table.tableName}}</h5>
+                <h5><b>Segment:</b> {{table.segmentName}}</h5>
                 <ul>
-                    <li ng-if="cube.streaming">Segment Name: <span class="red">{{table.segmentName}}</span></li>
-                    <li ng-if="cube.streaming">Segment Status: <span class="red">{{table.segmentStatus}}</span></li>
-                    <li ng-if="cube.streaming">Source Count: <span class="red">{{table.sourceCount}}</span></li>
-                    <li ng-if="cube.streaming&&table.sourceOffsetStart>0">SourceOffsetStart: <span class="red">{{table.sourceOffsetStart|reverseToGMT0}}</span></li>
-                    <li ng-if="cube.streaming&&table.sourceOffsetEnd>0">SourceOffsetEnd: <span class="red">{{table.sourceOffsetEnd|reverseToGMT0}}</span></li>
-                    <li>Region Count: <span class="red">{{table.regionCount}}</span></li>
-                    <li>Size: <span class="red">{{table.tableSize | bytes}}</span></li>
+                    <li ng-if="cube.streaming">Status: <span class="red">{{table.segmentStatus}}</span></li>
                     <li>Start Time: <span class="red">{{table.dateRangeStart | reverseToGMT0}}</span></li>
                     <li>End Time: <span class="red">{{table.dateRangeEnd | reverseToGMT0}}</span></li>
+                    <li>Source Count: <span class="red">{{table.sourceCount}}</span></li>
+                    <li>HBase Table: <span class="red">{{table.tableName}}</span></li>
+                    <li>Region Count: <span class="red">{{table.regionCount}}</span></li>
+                    <li>Size: <span class="red">{{table.tableSize | bytes}}</span></li>
                 </ul>
             </div>
-            <div ng-if="cube.hbase">
-                <div class="hr hr8 hr-double hr-dotted"></div>
-                <h5><b>Total Size:</b> <span class="red">{{cube.totalSize | bytes}}</span></h5>
-                <h5><b>Total Number:</b> <span class="red">{{cube.hbase.length}}</span></h5>
-            </div>
             <div ng-if="cube.hbase.length == 0">
-                <h5>No HBase Info.</h5>
+                <h5>No Storage Info.</h5>
             </div>
         </div>
     </div>
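
The reworked Storage tab above renders sizes through the bytes filter ({{cube.totalSize | bytes}}, {{table.tableSize | bytes}}). For orientation, a filter with that behavior could look like the sketch below; this is an assumption about the filter's implementation, not code from this PR:

    angular.module('storageSketch', []).filter('bytes', function () {
      return function (bytes) {
        if (isNaN(parseFloat(bytes)) || !isFinite(bytes)) { return '-'; }
        if (bytes === 0) { return '0 bytes'; }
        var units = ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB'];
        var power = Math.min(Math.floor(Math.log(bytes) / Math.log(1024)),
                             units.length - 1);
        return (bytes / Math.pow(1024, power)).toFixed(2) + ' ' + units[power];
      };
    });
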
diff --git a/webapp/app/partials/cubes/cubes.html b/webapp/app/partials/cubes/cubes.html
index 7b3a53354c..0187523905 100644
--- a/webapp/app/partials/cubes/cubes.html
+++ b/webapp/app/partials/cubes/cubes.html
@@ -91,7 +91,7 @@
                     <ul class="dropdown-menu" role="menu">
                         <li ng-if="cube.status!='READY' && userService.hasRole('ROLE_ADMIN') ">
                             <a ng-click="dropCube(cube)" tooltip="Drop the cube, related jobs and data permanently.">Drop</a></li>
-                        <li ng-if="cube.status!='READY' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))">
+                        <li ng-if="(userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))">
                             <a ng-click="cubeEdit(cube);">Edit</a></li>
                         <li ng-if="cube.streaming && cube.status=='DISABLED' && (userService.hasRole('ROLE_ADMIN') || hasPermission(cube, permissions.ADMINISTRATION.mask, permissions.MANAGEMENT.mask))"></li>
                         <li ng-if="cube.status!='DESCBROKEN'"><a ng-click="startJobSubmit(cube);">Build</a></li>


 
