Posted to commits@kylin.apache.org by li...@apache.org on 2017/02/21 00:52:42 UTC

[01/11] kylin git commit: default kylin.engine.mr.uhc-reducer-count to 1 in code, while setting it to 3 in CI [Forced Update!]

Repository: kylin
Updated Branches:
  refs/heads/master-hbase0.98 0bcd399b0 -> f5432f214 (forced update)


default kylin.engine.mr.uhc-reducer-count to 1 in code, while setting it to 3 in CI


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/92533c77
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/92533c77
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/92533c77

Branch: refs/heads/master-hbase0.98
Commit: 92533c7771f676634a37710a1a0fa4ce68ec162a
Parents: 385d801
Author: Li Yang <li...@apache.org>
Authored: Mon Feb 20 14:03:59 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Mon Feb 20 14:03:59 2017 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/common/KylinConfigBase.java     | 2 +-
 examples/test_case_data/localmeta/kylin.properties                 | 2 +-
 examples/test_case_data/sandbox/kylin.properties                   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/92533c77/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 2025a37..50d87a4 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -754,7 +754,7 @@ abstract public class KylinConfigBase implements Serializable {
 
     //UHC: ultra high cardinality columns, contain the ShardByColumns and the GlobalDictionaryColumns
     public int getUHCReducerCount() {
-        return Integer.parseInt(getOptional("kylin.engine.mr.uhc-reducer-count", "3"));
+        return Integer.parseInt(getOptional("kylin.engine.mr.uhc-reducer-count", "1"));
     }
 
     public boolean isBuildDictInReducerEnabled() {
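
For readers tracing the behavior change: getUHCReducerCount() resolves the property with a hard-coded fallback, so only deployments that never set kylin.engine.mr.uhc-reducer-count pick up the new default of 1. A simplified sketch of that resolution order (the real getOptional() may also consult system-level overrides):

    // An explicit kylin.properties entry wins; otherwise the in-code
    // default "1" applies. The CI metadata below pins the value to 3,
    // so tests keep exercising the multi-reducer UHC path.
    String raw = properties.getProperty("kylin.engine.mr.uhc-reducer-count");
    int uhcReducerCount = Integer.parseInt(raw != null ? raw : "1");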

http://git-wip-us.apache.org/repos/asf/kylin/blob/92533c77/examples/test_case_data/localmeta/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/localmeta/kylin.properties b/examples/test_case_data/localmeta/kylin.properties
index e89c767..eb4f35d 100644
--- a/examples/test_case_data/localmeta/kylin.properties
+++ b/examples/test_case_data/localmeta/kylin.properties
@@ -73,7 +73,7 @@ kylin.job.max-concurrent-jobs=10
 kylin.engine.mr.yarn-check-interval-seconds=10
 
 # for test
-kylin.engine.mr.uhc-reducer-count=1
+kylin.engine.mr.uhc-reducer-count=3
 
 ### CUBE ###
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/92533c77/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 91566ae..7040cf3 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -114,7 +114,7 @@ kylin.query.udf.version=org.apache.kylin.query.udf.VersionUDF
 
 # for test
 kylin.job.lock=org.apache.kylin.job.lock.MockJobLock
-kylin.engine.mr.uhc-reducer-count=1
+kylin.engine.mr.uhc-reducer-count=3
 
 ### CUBE ###
 


[04/11] kylin git commit: KYLIN-2452 code review

Posted by li...@apache.org.
KYLIN-2452 code review


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bf813401
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bf813401
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bf813401

Branch: refs/heads/master-hbase0.98
Commit: bf8134010a927b35b942957080466bce7ccac6c9
Parents: 42d09a7
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 20 15:30:59 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Feb 20 15:30:59 2017 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/cube/model/CubeDesc.java     | 5 +++--
 .../kylin/cube/model/validation/rule/AggregationGroupRule.java  | 5 +++++
 .../java/org/apache/kylin/rest/controller/CubeController.java   | 4 ----
 3 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/bf813401/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index 0f804d1..95e3343 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -440,8 +440,9 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     }
 
     public int getBuildLevel() {
-        if (aggregationGroups.size() <= 0)
-            throw new IllegalStateException("AggregationGroup size is: " + aggregationGroups.size() + ", there should be at least one AggregationGroup!");
+        if (aggregationGroups == null || aggregationGroups.size() == 0)
+            throw new IllegalStateException("Cube has no aggregation group.");
+
         return Collections.max(Collections2.transform(aggregationGroups, new Function<AggregationGroup, Integer>() {
             @Nullable
             @Override

http://git-wip-us.apache.org/repos/asf/kylin/blob/bf813401/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
index 3f865ea..c9e2d28 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/validation/rule/AggregationGroupRule.java
@@ -50,6 +50,11 @@ public class AggregationGroupRule implements IValidatorRule<CubeDesc> {
 
     private void inner(CubeDesc cube, ValidateContext context) {
 
+        if (cube.getAggregationGroups() == null || cube.getAggregationGroups().size() == 0) {
+            context.addResult(ResultLevel.ERROR, "Cube should have at least one Aggregation group.");
+            return;
+        }
+
         int index = 0;
         for (AggregationGroup agg : cube.getAggregationGroups()) {
             if (agg.getIncludes() == null) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/bf813401/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index fb4be5a..da44aec 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -441,10 +441,6 @@ public class CubeController extends BasicController {
             throw new BadRequestException("Invalid Cube name, only letters, numbers and underline supported.");
         }
 
-        int aggSize = desc.getAggregationGroups().size();
-        if (aggSize <= 0)
-            throw new IllegalStateException("AggregationGroup size is: " + aggSize + ", there should be at least one AggregationGroup!");
-
         try {
             desc.setUuid(UUID.randomUUID().toString());
             String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : cubeRequest.getProject();
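
Net effect of this review pass: the empty-aggregation-group check moves out of CubeController into the validation layer, so every save path reports it as a validation ERROR instead of throwing an unchecked exception from one controller. A hypothetical sketch of how a caller now sees the failure (the validate()/getResults() names are assumptions, not verified against the Kylin API):

    // Run the rule and inspect collected results instead of catching
    // IllegalStateException from CubeDesc.getBuildLevel().
    ValidateContext context = new ValidateContext();
    new AggregationGroupRule().validate(cubeDesc, context);
    // context now holds a ResultLevel.ERROR entry:
    //   "Cube should have at least one Aggregation group."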


[02/11] kylin git commit: KYLIN-2331 update IEngineAware.ID_SPARK to be consistent with KylinConfig

Posted by li...@apache.org.
KYLIN-2331 update IEngineAware.ID_SPARK to be consistent with KylinConfig


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bbcb0ed5
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bbcb0ed5
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bbcb0ed5

Branch: refs/heads/master-hbase0.98
Commit: bbcb0ed52c121bc68bea835e0fa0be7d9397b986
Parents: 92533c7
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 20 14:23:12 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Feb 20 14:23:17 2017 +0800

----------------------------------------------------------------------
 .../src/main/java/org/apache/kylin/common/KylinConfigBase.java | 6 +++---
 .../java/org/apache/kylin/metadata/model/IEngineAware.java     | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/bbcb0ed5/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 50d87a4..f7d8452 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -692,9 +692,9 @@ abstract public class KylinConfigBase implements Serializable {
     public Map<Integer, String> getJobEngines() {
         Map<Integer, String> r = Maps.newLinkedHashMap();
         // ref constants in IEngineAware
-        r.put(0, "org.apache.kylin.engine.mr.MRBatchCubingEngine");
-        r.put(2, "org.apache.kylin.engine.mr.MRBatchCubingEngine2");
-        r.put(4, "org.apache.kylin.engine.spark.SparkBatchCubingEngine2");
+        r.put(0, "org.apache.kylin.engine.mr.MRBatchCubingEngine"); //IEngineAware.ID_MR_V1
+        r.put(2, "org.apache.kylin.engine.mr.MRBatchCubingEngine2"); //IEngineAware.ID_MR_V2
+        r.put(4, "org.apache.kylin.engine.spark.SparkBatchCubingEngine2"); //IEngineAware.ID_SPARK
         r.putAll(convertKeyToInteger(getPropertiesByPrefix("kylin.engine.provider.")));
         return r;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/bbcb0ed5/core-metadata/src/main/java/org/apache/kylin/metadata/model/IEngineAware.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/IEngineAware.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/IEngineAware.java
index 882b2e3..78f7faa 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/IEngineAware.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/IEngineAware.java
@@ -23,7 +23,7 @@ public interface IEngineAware {
     public static final int ID_MR_V1 = 0;
     public static final int ID_MR_V2 = 2;
     public static final int ID_MR_II = 3;
-    public static final int ID_SPARK = 5;
+    public static final int ID_SPARK = 4;
 
     int getEngineType();
 }
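
The constants in IEngineAware double as keys into the registry built by KylinConfigBase.getJobEngines() above; before this fix, ID_SPARK (5) pointed at a key the registry never populates. A minimal sketch of the lookup that now lines up:

    // With ID_SPARK corrected to 4, the registry lookup resolves to the
    // Spark engine class; with the old value 5 it returned null.
    Map<Integer, String> engines = KylinConfig.getInstanceFromEnv().getJobEngines();
    String sparkEngine = engines.get(IEngineAware.ID_SPARK);
    // -> "org.apache.kylin.engine.spark.SparkBatchCubingEngine2"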


[06/11] kylin git commit: KYLIN-2451 rework to respect cube-level coprocessor-timeout-seconds

Posted by li...@apache.org.
KYLIN-2451 rework to respect cube-level coprocessor-timeout-seconds


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/d7ecd6e2
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/d7ecd6e2
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/d7ecd6e2

Branch: refs/heads/master-hbase0.98
Commit: d7ecd6e2f1a87bf82d3cf6645eab013b873dde8d
Parents: c52b698
Author: Li Yang <li...@apache.org>
Authored: Mon Feb 20 17:21:21 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Mon Feb 20 17:21:21 2017 +0800

----------------------------------------------------------------------
 .../kylin/storage/hbase/HBaseConnection.java    |  6 ----
 .../storage/hbase/cube/v2/CubeHBaseRPC.java     | 37 ++++++++++++--------
 2 files changed, 22 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/d7ecd6e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index e3d2308..73f31c5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -163,11 +162,6 @@ public class HBaseConnection {
             conf.set("hbase.fs.tmp.dir", "/tmp");
         }
 
-        // set RPC timeout
-        if (kylinConf.getQueryCoprocessorTimeoutSeconds() > 0) {
-            conf.set(HConstants.HBASE_RPC_TIMEOUT_KEY, "" + (1000 * kylinConf.getQueryCoprocessorTimeoutSeconds()));
-        }
-
         return conf;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/d7ecd6e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
index f24290c..88e7176 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseRPC.java
@@ -35,7 +35,6 @@ import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.cube.kv.FuzzyKeyEncoder;
 import org.apache.kylin.cube.kv.FuzzyMaskEncoder;
@@ -49,6 +48,7 @@ import org.apache.kylin.gridtable.GTInfo;
 import org.apache.kylin.gridtable.GTRecord;
 import org.apache.kylin.gridtable.GTScanRange;
 import org.apache.kylin.gridtable.IGTStorage;
+import org.apache.kylin.metadata.model.ISegment;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -283,23 +283,30 @@ public abstract class CubeHBaseRPC implements IGTStorage {
     }
 
     protected int getCoprocessorTimeoutMillis() {
-        int configTimeout = cubeSeg.getConfig().getQueryCoprocessorTimeoutSeconds() * 1000;
-        if (configTimeout == 0) {
-            configTimeout = Integer.MAX_VALUE;
+        int coopTimeout;
+        if (BackdoorToggles.getQueryTimeout() != -1) {
+            coopTimeout = BackdoorToggles.getQueryTimeout();
+        } else {
+            coopTimeout = cubeSeg.getConfig().getQueryCoprocessorTimeoutSeconds() * 1000;
         }
-
+        
+        int rpcTimeout;
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
-        int rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
-        // final timeout should be smaller than rpc timeout
-        int upper = (int) (rpcTimeout * 0.9);
-
-        int timeout = Math.min(upper, configTimeout);
-        if (BackdoorToggles.getQueryTimeout() != -1) {
-            timeout = Math.min(upper, BackdoorToggles.getQueryTimeout());
+        rpcTimeout = hconf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
+        
+        // HBase rpc timeout must be longer than coprocessor timeout
+        if ((int) (coopTimeout * 1.1) > rpcTimeout) {
+            rpcTimeout = (int) (coopTimeout * 1.1);
+            hconf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout);
         }
-
-        logger.debug("{} = {} ms, use {} ms as timeout for coprocessor", HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout, timeout);
-        return timeout;
+        
+        // coprocessor timeout is 0 by default
+        if (coopTimeout <= 0) {
+            coopTimeout = (int) (rpcTimeout * 0.9);
+        }
+        
+        logger.debug("{} = {} ms, use {} ms as timeout for coprocessor", HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout, coopTimeout);
+        return coopTimeout;
     }
 
 }
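
A worked example of the new negotiation (values are illustrative): suppose the cube-level coprocessor timeout is 60 seconds and hbase.rpc.timeout sits at its 60-second default. This also explains why the global RPC-timeout override in HBaseConnection is removed: the per-query path above raises hbase.rpc.timeout only when the coprocessor timeout requires it.

    // Tracing getCoprocessorTimeoutMillis() with sample values:
    int coopTimeout = 60 * 1000;   // coprocessor-timeout-seconds = 60
    int rpcTimeout = 60000;        // hbase.rpc.timeout from hconf
    // coopTimeout * 1.1 = 66000 > 60000, so the RPC timeout is raised:
    rpcTimeout = 66000;
    // coopTimeout > 0, so the 0.9 * rpcTimeout fallback is skipped.
    // Result: the coprocessor deadline (60 s) fires before the HBase RPC
    // deadline (66 s), so a timeout surfaces as a coprocessor error
    // rather than a dropped RPC.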


[07/11] kylin git commit: KYLIN-1875, refine the aliasMap

Posted by li...@apache.org.
KYLIN-1875, refine the aliasMap

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ed413ee3
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ed413ee3
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ed413ee3

Branch: refs/heads/master-hbase0.98
Commit: ed413ee34a6f0301b57133ef7b4d45d58fc9f534
Parents: d7ecd6e
Author: luguosheng <55...@qq.com>
Authored: Mon Feb 20 16:36:53 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Feb 20 18:29:35 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/cubeDimensions.js     | 26 +-----
 webapp/app/js/controllers/cubeEdit.js           | 58 ++----------
 webapp/app/js/controllers/cubeSchema.js         | 94 +++++++++++++++++++-
 webapp/app/js/model/modelsManager.js            |  8 ++
 .../app/partials/cubeDesigner/dimensions.html   |  6 +-
 5 files changed, 112 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/ed413ee3/webapp/app/js/controllers/cubeDimensions.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeDimensions.js b/webapp/app/js/controllers/cubeDimensions.js
index 80d54bb..44739ac 100644
--- a/webapp/app/js/controllers/cubeDimensions.js
+++ b/webapp/app/js/controllers/cubeDimensions.js
@@ -52,19 +52,6 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
 
         var rootFactTable = VdmUtil.removeNameSpace($scope.metaModel.model.fact_table);
 
-        if($scope.aliasName.length==0){
-          $scope.aliasName.push(rootFactTable);
-          $scope.aliasTableMap[rootFactTable]=$scope.metaModel.model.fact_table;
-          $scope.tableAliasMap[$scope.metaModel.model.fact_table]=rootFactTable;
-          angular.forEach($scope.metaModel.model.lookups,function(joinTable){
-            if(!joinTable.alias){
-              joinTable.alias=VdmUtil.removeNameSpace(joinTable.table);
-            }
-            $scope.aliasTableMap[joinTable.alias]=joinTable.table;
-            $scope.tableAliasMap[joinTable.table]=joinTable.alias;
-            $scope.aliasName.push(joinTable.alias);
-          });
-        }
         // At first dump the columns of fact table.
 
         var cols = $scope.getDimColumnsByAlias(rootFactTable);
@@ -83,17 +70,10 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
         $scope.availableColumns[rootFactTable] = cols;
         factSelectAvailable.all=false;
         $scope.selectedColumns[rootFactTable] = factSelectAvailable;
-//        $scope.availableTables.push(rootFactTable);
-        $scope.availableFactTables.push(rootFactTable);
         // Then dump each lookup tables.
         var lookups = $scope.metaModel.model.lookups;
 
         for (var j = 0; j < lookups.length; j++) {
-            if(lookups[j].kind=="FACT"){
-                $scope.availableFactTables.push(lookups[j].alias);
-            }else{
-                $scope.availableLookupTables.push(lookups[j].alias);
-            }
             var cols2 = $scope.getDimColumnsByAlias(lookups[j].alias);
 
             // Initialize selected available.
@@ -380,7 +360,7 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
         var selectedCols = $scope.getSelectedCols();
         dimList=[];
         angular.forEach(selectedCols, function (cols, table) {
-            if ($scope.availableFactTables.indexOf(table)!=-1) {
+            if ($scope.modelsManager.availableFactTables.indexOf(table)!=-1) {
                 // Fact table: for each selected column, create one normal dimension.
                 for (var i = 0; i < cols.length; i++) {
                     dimList.push(Dimension(cols[i].name, table, [cols[i].col], 'normal'));
@@ -401,7 +381,7 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
     };
 
     $scope.autoChange = function(table,name){
-         if($scope.availableFactTables.indexOf(table)!=-1){
+         if($scope.modelsManager.availableFactTables.indexOf(table)!=-1){
                if($scope.selectedColumns[table][name].selected==false){
                     $scope.selectedColumns[table].all=false;
                }else{
@@ -435,7 +415,7 @@ KylinApp.controller('CubeDimensionsCtrl', function ($scope, $modal,MetaModel,cub
     }
 
     $scope.autoChangeAll= function(table){
-         if($scope.availableFactTables.indexOf(table)!=-1){
+         if($scope.modelsManager.availableFactTables.indexOf(table)!=-1){
               if($scope.selectedColumns[table].all==true){
                    angular.forEach($scope.selectedColumns[table],function(col){
                         if(typeof col==="object"){

http://git-wip-us.apache.org/repos/asf/kylin/blob/ed413ee3/webapp/app/js/controllers/cubeEdit.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeEdit.js b/webapp/app/js/controllers/cubeEdit.js
index bc0bceb..92d8305 100755
--- a/webapp/app/js/controllers/cubeEdit.js
+++ b/webapp/app/js/controllers/cubeEdit.js
@@ -23,11 +23,7 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
   $scope.cubeConfig = cubeConfig;
   $scope.metaModel = {};
   $scope.modelsManager = modelsManager;
-  $scope.tableAliasMap=$scope.modelsManager.tableAliasMap;
-  $scope.aliasTableMap=$scope.modelsManager.aliasTableMap;
-  $scope.availableFactTables =$scope.modelsManager.availableFactTables;
-  $scope.availableLookupTables =$scope.modelsManager.availableLookupTables;
-  $scope.aliasName=$scope.modelsManager.aliasName;
+
   //add or edit ?
   var absUrl = $location.absUrl();
   $scope.cubeMode = absUrl.indexOf("/cubes/add") != -1 ? 'addNewCube' : absUrl.indexOf("/cubes/edit") != -1 ? 'editExistCube' : 'default';
@@ -86,41 +82,6 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     $scope.store.supportedEncoding = $scope.cubeConfig.encodings;
   })
 
-  $scope.getDatabaseByColumnName=function(column){
-    return  VdmUtil.getNameSpaceTopName($scope.aliasTableMap[VdmUtil.getNameSpaceTopName(column)])
-  }
-  $scope.getColumnTypeByAliasName=function(column){
-    return TableModel.columnNameTypeMap[$scope.aliasTableMap[VdmUtil.getNameSpaceTopName(column)]+'.'+VdmUtil.removeNameSpace(column)];
-  }
-  $scope.getEncodings =function (name){
-    var filterName=name;
-    var columnType= modelsManager.getColumnTypeByColumnName(filterName);
-    var matchList=VdmUtil.getObjValFromLikeKey($scope.store.encodingMaps,columnType);
-    var encodings =$scope.store.supportedEncoding,filterEncoding;
-    if($scope.isEdit){
-      var rowkey_columns=$scope.cubeMetaFrame.rowkey.rowkey_columns;
-      if(rowkey_columns&&filterName){
-        for(var s=0;s<rowkey_columns.length;s++){
-          var database=$scope.getDatabaseByColumnName(rowkey_columns[s].column);
-          if(filterName==rowkey_columns[s].column){
-            var version=rowkey_columns[s].encoding_version;
-            var noLenEncoding=rowkey_columns[s].encoding.replace(/:\d+/,"");
-            filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'value',noLenEncoding+(version?"[v"+version+"]":"[v1]"),'suggest',true)
-            matchList.push(noLenEncoding);
-            filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList);
-            break;
-          }
-        }
-      }else{
-        filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'suggest',true);
-        filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList)
-      }
-    }else{
-      filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'suggest',true);
-      filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList)
-    }
-    return filterEncoding;
-  }
   $scope.getColumnsByAlias = function (alias) {
     var temp = [];
     angular.forEach(TableModel.selectProjectTables, function (table) {
@@ -145,7 +106,7 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     if (!alias) {
       return [];
     }
-    var tableColumns = $scope.getColumnsByAlias(alias);
+    var tableColumns = $scope.modelsManager.getColumnsByAlias(alias);
     var tableDim = _.find($scope.metaModel.model.dimensions, function (dimension) {
       return dimension.table == alias
     });
@@ -229,7 +190,7 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     var me_columns = [];
     //add cube dimension column for specific measure
     angular.forEach($scope.cubeMetaFrame.dimensions,function(dimension,index){
-      if($scope.availableFactTables.indexOf(dimension.table)==-1){
+      if($scope.modelsManager.availableFactTables.indexOf(dimension.table)==-1){
         return;
       }
       if(dimension.column && dimension.derived == null){
@@ -261,23 +222,14 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
            me_columns.push(measure.function.parameter.value);
          }
     });
-
-    var unique = []
-
-    angular.forEach(me_columns, function (column) {
-        if (unique.indexOf(column) === -1) {
-          unique.push(column);
-        }
-      });
-
-      return unique;
+    return distinct_array(me_columns);
 
   };
 
 
 
   $scope.getColumnType = function (_column, alias) {
-    var columns = $scope.getColumnsByAlias(alias);
+    var columns = $scope.modelsManager.getColumnsByAlias(alias);
     var type;
     angular.forEach(columns, function (column) {
       if (_column === column.name) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/ed413ee3/webapp/app/js/controllers/cubeSchema.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeSchema.js b/webapp/app/js/controllers/cubeSchema.js
index 6aeea3c..91a1567 100755
--- a/webapp/app/js/controllers/cubeSchema.js
+++ b/webapp/app/js/controllers/cubeSchema.js
@@ -18,7 +18,7 @@
 
 'use strict';
 
-KylinApp.controller('CubeSchemaCtrl', function ($scope, QueryService, UserService,modelsManager, ProjectService, AuthenticationService,$filter,ModelService,MetaModel,CubeDescModel,CubeList,TableModel,ProjectModel,ModelDescService,SweetAlert,cubesManager,StreamingService,CubeService) {
+KylinApp.controller('CubeSchemaCtrl', function ($scope, QueryService, UserService,modelsManager, ProjectService, AuthenticationService,$filter,ModelService,MetaModel,CubeDescModel,CubeList,TableModel,ProjectModel,ModelDescService,SweetAlert,cubesManager,StreamingService,CubeService,VdmUtil) {
     $scope.modelsManager = modelsManager;
     $scope.cubesManager = cubesManager;
     $scope.projects = [];
@@ -37,6 +37,98 @@ KylinApp.controller('CubeSchemaCtrl', function ($scope, QueryService, UserServic
 
     $scope.curStep = $scope.wizardSteps[0];
 
+  $scope.getTypeVersion=function(typename){
+    var searchResult=/\[v(\d+)\]/.exec(typename);
+    if(searchResult&&searchResult.length){
+      return searchResult.length&&searchResult[1]||1;
+    }else{
+      return 1;
+    }
+  }
+  $scope.removeVersion=function(typename){
+    if(typename){
+      return typename.replace(/\[v\d+\]/g,"").replace(/\s+/g,'');
+    }
+    return "";
+  }
+
+  //init encoding list
+  $scope.store = {
+    supportedEncoding:[],
+    encodingMaps:{}
+  }
+  TableModel.getColumnTypeEncodingMap().then(function(data){
+    $scope.store.encodingMaps=data;
+  });
+  CubeService.getValidEncodings({}, function (encodings) {
+    if(encodings){
+      for(var i in encodings)
+        if(VdmUtil.isNotExtraKey(encodings,i)){
+          var value = i
+          var name = value;
+          var typeVersion=+encodings[i]||1;
+          var suggest=false,selecttips='';
+          if(/\d+/.test(""+typeVersion)&&typeVersion>=1){
+            for(var s=1;s<=typeVersion;s++){
+              if(s==typeVersion){
+                suggest=true;
+              }
+              if(value=="int"){
+                name = "int (deprecated)";
+                suggest=false;
+              }
+              if(typeVersion>1){
+                selecttips="(v"+s;
+                if(s==typeVersion){
+                  selecttips=",suggest)"
+                }
+                selecttips=')';
+              }
+              $scope.store.supportedEncoding.push({
+                "name":name+selecttips,
+                "value":value+"[v"+s+"]",
+                "version":typeVersion,
+                "baseValue":value,
+                "suggest":suggest
+              });
+            }
+          }
+        }
+    }
+  },function(e){
+    $scope.store.supportedEncoding = $scope.cubeConfig.encodings;
+  })
+  $scope.getEncodings =function (name){
+    var filterName=name;
+    var columnType= $scope.modelsManager.getColumnTypeByColumnName(filterName);
+    var matchList=VdmUtil.getObjValFromLikeKey($scope.store.encodingMaps,columnType);
+    var encodings =$scope.store.supportedEncoding,filterEncoding;
+    if($scope.isEdit){
+      var rowkey_columns=$scope.cubeMetaFrame.rowkey.rowkey_columns;
+      if(rowkey_columns&&filterName){
+        for(var s=0;s<rowkey_columns.length;s++){
+          if(filterName==rowkey_columns[s].column){
+            var version=rowkey_columns[s].encoding_version;
+            var noLenEncoding=rowkey_columns[s].encoding.replace(/:\d+/,"");
+            filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'value',noLenEncoding+(version?"[v"+version+"]":"[v1]"),'suggest',true)
+            matchList.push(noLenEncoding);
+            filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList);
+            break;
+          }
+        }
+      }else{
+        filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'suggest',true);
+        filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList)
+      }
+    }else{
+      filterEncoding=VdmUtil.getFilterObjectListByOrFilterVal(encodings,'suggest',true);
+      filterEncoding=VdmUtil.getObjectList(filterEncoding,'baseValue',matchList)
+    }
+    return filterEncoding;
+  }
+
+
+
     $scope.allCubes = [];
 
     $scope.getTypeVersion=function(typename){

http://git-wip-us.apache.org/repos/asf/kylin/blob/ed413ee3/webapp/app/js/model/modelsManager.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/model/modelsManager.js b/webapp/app/js/model/modelsManager.js
index f00f5cc..99a4646 100644
--- a/webapp/app/js/model/modelsManager.js
+++ b/webapp/app/js/model/modelsManager.js
@@ -101,6 +101,11 @@ KylinApp.service('modelsManager',function(ModelService,CubeService,$q,AccessServ
 
     this.initAliasMapByModelSchema=function(metaModel){
       var rootFactTable = VdmUtil.removeNameSpace(metaModel.model.fact_table);
+      this.tableAliasMap={};
+      this.aliasTableMap={};
+      this.availableFactTables=[];
+      this.availableLookupTables=[];
+      this.aliasName=[];
       this.availableFactTables.push(rootFactTable);
       this.aliasName.push(rootFactTable);
       this.aliasTableMap[rootFactTable]=metaModel.model.fact_table;
@@ -126,5 +131,8 @@ KylinApp.service('modelsManager',function(ModelService,CubeService,$q,AccessServ
     this.getColumnTypeByColumnName=function(column){
       return TableModel.columnNameTypeMap[this.aliasTableMap[VdmUtil.getNameSpaceTopName(column)]+'.'+VdmUtil.removeNameSpace(column)];
     }
+    this.getColumnsByAlias=function(alias){
+      return TableModel.getColumnsByTable(this.aliasTableMap[alias]);
+    }
 
 });

http://git-wip-us.apache.org/repos/asf/kylin/blob/ed413ee3/webapp/app/partials/cubeDesigner/dimensions.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/dimensions.html b/webapp/app/partials/cubeDesigner/dimensions.html
index b8bd05f..e1833d3 100644
--- a/webapp/app/partials/cubeDesigner/dimensions.html
+++ b/webapp/app/partials/cubeDesigner/dimensions.html
@@ -177,7 +177,7 @@
                             </div>
                         </div>
                     </div>
-                    <div  ng-if="availableLookupTables.indexOf(newDimension.table)!=-1">
+                    <div  ng-if="modelsManager.availableLookupTables.indexOf(newDimension.table)!=-1">
                         <div  class="row">
                             <label class="control-label col-xs-12 col-sm-3 no-padding-right font-color-default">
                                 <b>Type</b>
@@ -217,7 +217,7 @@
                         <div class="box-body">
                           <ul class="list-unstyled columns-region">
                               <!--FactTable-->
-                              <div ng-repeat="table in availableFactTables track by $index"   class="panel-default " >
+                              <div ng-repeat="table in modelsManager.availableFactTables track by $index"   class="panel-default " >
                                 <h4>{{table}}&nbsp;[FactTable]</h4>
                                 <table class="table table-striped table-hover ng-scope">
                                   <tr >
@@ -243,7 +243,7 @@
                                 </table>
                               </div>
                               <!--LookUp Table-->
-                              <div ng-repeat="table in availableLookupTables track by $index"  class="panel-default" >
+                              <div ng-repeat="table in modelsManager.availableLookupTables track by $index"  class="panel-default" >
                                 <h4>{{table}}&nbsp;[LookupTable]</h4>
                                 <table class="table table-striped table-hover ng-scope">
                                   <tr class="row" >


[09/11] kylin git commit: Minor, layer cuboid folder name convention

Posted by li...@apache.org.
Minor, layer cuboid folder name convention


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/5d2e6c10
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/5d2e6c10
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/5d2e6c10

Branch: refs/heads/master-hbase0.98
Commit: 5d2e6c10c40c725c8e068242da5f719a71505099
Parents: e05d8f3
Author: shaofengshi <sh...@apache.org>
Authored: Mon Feb 20 23:11:53 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Feb 20 23:12:01 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/engine/mr/JobBuilderSupport.java     | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/5d2e6c10/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
index c34a904..c1ed345 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/JobBuilderSupport.java
@@ -45,6 +45,8 @@ public class JobBuilderSupport {
     final protected CubeSegment seg;
     final protected String submitter;
 
+    final public static String LayeredCuboidFolderPrefix = "level_";
+
     public JobBuilderSupport(CubeSegment seg, String submitter) {
         Preconditions.checkNotNull(seg, "segment cannot be null");
         this.config = new JobEngineConfig(seg.getConfig());
@@ -195,9 +197,9 @@ public class JobBuilderSupport {
 
     public static String getCuboidOutputPathsByLevel(String cuboidRootPath, int level) {
         if (level == 0) {
-            return cuboidRootPath + "base_cuboid";
+            return cuboidRootPath + LayeredCuboidFolderPrefix + "base_cuboid";
         } else {
-            return cuboidRootPath + "level_" + level + "_cuboid";
+            return cuboidRootPath + LayeredCuboidFolderPrefix + level + "_cuboid";
         }
     }
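
To make the resulting HDFS layout concrete (the root path below is hypothetical), every layer folder now shares the level_ prefix:

    // Folder names produced by getCuboidOutputPathsByLevel() after this change:
    String root = "/kylin/kylin_metadata/cube_sales/cuboid/";
    getCuboidOutputPathsByLevel(root, 0); // -> .../cuboid/level_base_cuboid
    getCuboidOutputPathsByLevel(root, 3); // -> .../cuboid/level_3_cuboid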
 


[10/11] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index f63d9c2..b141190 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,13 +26,12 @@ import java.util.NoSuchElementException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.BufferedMutator;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.kv.RowConstants;
@@ -87,13 +86,14 @@ public class SimpleHBaseStore implements IGTStore {
     }
 
     private class Writer implements IGTWriter {
-        final BufferedMutator table;
+        final HTableInterface table;
         final ByteBuffer rowkey = ByteBuffer.allocate(50);
         final ByteBuffer value = ByteBuffer.allocate(50);
 
         Writer() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            table = conn.getBufferedMutator(htableName);
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            table = conn.getTable(htableName);
+            table.setAutoFlush(false, true);
         }
 
         @Override
@@ -113,24 +113,24 @@ public class SimpleHBaseStore implements IGTStore {
 
             Put put = new Put(rowkey);
             put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
-            table.mutate(put);
+            table.put(put);
         }
 
         @Override
         public void close() throws IOException {
-            table.flush();
+            table.flushCommits();
             table.close();
         }
     }
 
     class Reader implements IGTScanner {
-        final Table table;
+        final HTableInterface table;
         final ResultScanner scanner;
 
         int count = 0;
 
         Reader() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
             table = conn.getTable(htableName);
 
             Scan scan = new Scan();
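
For readers comparing the two client generations, a minimal sketch of the 0.98-style write path used by Writer above (conn and htableName as in the diff; the row key is a placeholder):

    // HBase 0.98: HTableInterface with client-side buffering stands in for
    // the 1.x BufferedMutator (mutate() -> put(), flush() -> flushCommits()).
    HTableInterface table = conn.getTable(htableName);
    table.setAutoFlush(false, true);            // buffer puts locally
    table.put(new Put(Bytes.toBytes("row-0"))); // placeholder row key
    table.flushCommits();                       // push buffered mutations
    table.close();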

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index abc3437..a251031 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -27,9 +27,8 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.zip.DataFormatException;
 
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -53,10 +52,10 @@ import org.apache.kylin.storage.gtrecord.StorageResponseGTScatter;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -118,7 +117,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
 
         // globally shared connection, does not require close
-        final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
 
         final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -175,7 +174,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     final AtomicReference<RuntimeException> regionErrorHolder = new AtomicReference<>();
 
                     try {
-                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
+                        HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
 
                         final CubeVisitRequest request = builder.build();
                         final byte[] startKey = epRange.getFirst();

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index 1698180..b32f6b1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -25,12 +25,11 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.ShardingHash;
@@ -156,8 +155,8 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
         // primary key (also the 0th column block) is always selected
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
         // globally shared connection, does not require close
-        Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
-        final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
+        HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
 
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 61cf067..4d382b0 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -195,7 +195,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         if (shardLength == 0) {
             return;
         }
-        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
+        byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
         Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
         Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
     }
@@ -234,7 +234,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             this.serviceStartTime = System.currentTimeMillis();
 
-            region = (HRegion)env.getRegion();
+            region = env.getRegion();
             region.startRegionOperation();
 
             // if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index feb4842..2814ad6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -26,8 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -80,8 +79,7 @@ public class CubeHTableUtil {
         tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin admin = conn.getAdmin();
+        HBaseAdmin admin = new HBaseAdmin(conf);
 
         try {
             if (User.isHBaseSecurityEnabled(conf)) {
@@ -94,7 +92,7 @@ public class CubeHTableUtil {
                 tableDesc.addFamily(cf);
             }
 
-            if (admin.tableExists(TableName.valueOf(tableName))) {
+            if (admin.tableExists(tableName)) {
                 // admin.disableTable(tableName);
                 // admin.deleteTable(tableName);
                 throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -103,7 +101,7 @@ public class CubeHTableUtil {
             DeployCoprocessorCLI.deployCoprocessor(tableDesc);
 
             admin.createTable(tableDesc, splitKeys);
-            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);
@@ -112,7 +110,8 @@ public class CubeHTableUtil {
     }
 
     public static void deleteHTable(TableName tableName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
@@ -127,7 +126,8 @@ public class CubeHTableUtil {
 
     /** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
     public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
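
The same 0.98 admin pattern repeats across this back-port: HBaseAdmin is constructed from a Configuration rather than obtained from a Connection, and it accepts plain String or byte[] table names where the 1.x Admin requires TableName. A sketch with a hypothetical table name:

    Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
    HBaseAdmin admin = new HBaseAdmin(conf);
    try {
        if (admin.tableExists("KYLIN_SAMPLE")) {   // hypothetical table
            admin.disableTable("KYLIN_SAMPLE");
            admin.deleteTable("KYLIN_SAMPLE");
        }
    } finally {
        IOUtils.closeQuietly(admin);
    }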

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index df3cf08..eacff9f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -25,13 +25,13 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.HiveCmdBuilder;
 import org.apache.kylin.job.exception.ExecuteException;
@@ -100,21 +100,19 @@ public class DeprecatedGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped HBase table " + table);
                             output.append("Dropped HBase table " + table + " \n");
                         } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index 6587d4e..d5b36df 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -23,8 +23,8 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
@@ -49,7 +49,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
 
     private final List<KeyValueCreator> keyValueCreators;
     private final int nColumns;
-    private final Table hTable;
+    private final HTableInterface hTable;
     private final CubeDesc cubeDesc;
     private final CubeSegment cubeSegment;
     private final Object[] measureValues;
@@ -58,7 +58,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
     private AbstractRowKeyEncoder rowKeyEncoder;
     private byte[] keybuf;
 
-    public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
+    public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
         this.keyValueCreators = Lists.newArrayList();
         this.cubeSegment = segment;
         this.cubeDesc = cubeSegment.getCubeDesc();
@@ -117,6 +117,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
             long t = System.currentTimeMillis();
             if (hTable != null) {
                 hTable.put(puts);
+                hTable.flushCommits();
             }
             logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
             puts.clear();

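The restored flushCommits() matters because an HTableInterface may buffer Puts client-side; in the 1.x API that buffering moved into BufferedMutator and Table.put() writes through, so the call had been dropped. A rough sketch of the write-then-flush idiom, assuming a caller-supplied HConnection (names are illustrative):

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;

    class BufferedPutSketch {
        static void commit(HConnection conn, String tableName, List<Put> puts) throws IOException {
            HTableInterface table = conn.getTable(tableName); // 0.98: tables are looked up by String
            try {
                table.put(puts);      // may only land in the client-side write buffer
                table.flushCommits(); // push buffered mutations to the region servers now
            } finally {
                table.close();        // close() flushes too, but the explicit call documents intent
            }
        }
    }
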
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 2f7e164..5b2441c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,20 +69,19 @@ public class MergeGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped htable: " + table);
                             output.append("HBase table " + table + " is dropped. \n");
                         } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 56f867a..a150607 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,11 +21,9 @@ package org.apache.kylin.storage.hbase.util;
 import java.io.IOException;
 
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -40,8 +38,8 @@ public class CleanHtableCLI extends AbstractApplication {
     protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
 
     private void clean() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
             String name = descriptor.getNameAsString().toLowerCase();
@@ -52,7 +50,7 @@ public class CleanHtableCLI extends AbstractApplication {
                 System.out.println();
 
                 descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
-                hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
+                hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
             }
         }
         hbaseAdmin.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 581de38..68c0a39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -26,19 +26,19 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -89,7 +89,7 @@ public class CubeMigrationCLI {
     private static ResourceStore srcStore;
     private static ResourceStore dstStore;
     private static FileSystem hdfsFS;
-    private static Admin hbaseAdmin;
+    private static HBaseAdmin hbaseAdmin;
 
     public static final String ACL_INFO_FAMILY = "i";
     private static final String ACL_TABLE_NAME = "_acl";
@@ -134,8 +134,8 @@ public class CubeMigrationCLI {
 
         checkAndGetHbaseUrl();
 
-        Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
 
         hdfsFS = HadoopUtil.getWorkingFileSystem();
 
@@ -233,7 +233,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -327,8 +326,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -450,11 +449,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -474,6 +473,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -504,8 +504,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -539,12 +539,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -561,7 +562,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);

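In the CHANGE_HTABLE_HOST hunks the descriptor is still fetched via TableName.valueOf, but disable/modify/enable fall back to the String overloads HBaseAdmin kept in 0.98. The disable-modify-enable cycle is needed because 0.98 rejects metadata changes on an enabled table unless online schema update is switched on, which it is not by default. A compact sketch of that cycle (helper names are illustrative):

    import java.io.IOException;

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.kylin.metadata.realization.IRealizationConstants;

    class RetagHTableSketch {
        // Point an htable's HTableTag at a different Kylin metadata prefix.
        static void retag(HBaseAdmin admin, String tableName, String newHost) throws IOException {
            HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf(tableName));
            admin.disableTable(tableName);          // metadata edits need the table offline
            desc.setValue(IRealizationConstants.HTableTag, newHost);
            admin.modifyTable(tableName, desc);     // String overload, as in the hunk above
            admin.enableTable(tableName);
        }
    }
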
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 20d0f7d..8bd4abf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeInstance;
@@ -61,7 +61,7 @@ public class CubeMigrationCheckCLI {
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
 
     private List<String> issueExistHTables;
     private List<String> inconsistentHTables;
@@ -130,8 +130,9 @@ public class CubeMigrationCheckCLI {
         this.dstCfg = kylinConfig;
         this.ifFix = isFix;
 
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
+
         issueExistHTables = Lists.newArrayList();
         inconsistentHTables = Lists.newArrayList();
     }
@@ -188,10 +189,10 @@ public class CubeMigrationCheckCLI {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
                 logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
-                hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.modifyTable(sepNameList[0], desc);
+                hbaseAdmin.enableTable(sepNameList[0]);
             }
         } else {
             logger.info("------ Inconsistent HTables Needed To Be Fixed ------");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 29c738e..c9e969f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -44,8 +44,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinVersion;
@@ -86,8 +85,7 @@ public class DeployCoprocessorCLI {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         FileSystem fileSystem = FileSystem.get(hconf);
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf);
 
         String localCoprocessorJar;
         if ("default".equals(args[0])) {
@@ -205,10 +203,10 @@ public class DeployCoprocessorCLI {
     public static void deployCoprocessor(HTableDescriptor tableDesc) {
         try {
             initHTableCoprocessor(tableDesc);
-            logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
+            logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
 
         } catch (Exception ex) {
-            logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
+            logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
             logger.error("Will try creating the table without coprocessor.");
         }
     }
@@ -229,7 +227,7 @@ public class DeployCoprocessorCLI {
         desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
     }
 
-    public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
+    public static boolean resetCoprocessor(String tableName, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
@@ -244,7 +242,7 @@ public class DeployCoprocessorCLI {
         logger.info("reset coprocessor on " + tableName);
 
         logger.info("Disable " + tableName);
-        hbaseAdmin.disableTable(TableName.valueOf(tableName));
+        hbaseAdmin.disableTable(tableName);
 
         while (desc.hasCoprocessor(CubeObserverClassOld2)) {
             desc.removeCoprocessor(CubeObserverClassOld2);
@@ -270,15 +268,16 @@ public class DeployCoprocessorCLI {
             desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
         }
 
-        hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+        hbaseAdmin.modifyTable(tableName, desc);
 
         logger.info("Enable " + tableName);
-        hbaseAdmin.enableTable(TableName.valueOf(tableName));
+        hbaseAdmin.enableTable(tableName);
 
         return true;
     }
 
-    private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
+
+    private static List<String> resetCoprocessorOnHTables(final HBaseAdmin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
         List<String> processedTables = Collections.synchronizedList(new ArrayList<String>());
         ExecutorService coprocessorPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
         CountDownLatch countDownLatch = new CountDownLatch(tableNames.size());
@@ -299,12 +298,12 @@ public class DeployCoprocessorCLI {
 
     private static class ResetCoprocessorWorker implements Runnable {
         private final CountDownLatch countDownLatch;
-        private final Admin hbaseAdmin;
+        private final HBaseAdmin hbaseAdmin;
         private final Path hdfsCoprocessorJar;
         private final String tableName;
         private final List<String> processedTables;
 
-        public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
+        public ResetCoprocessorWorker(CountDownLatch countDownLatch, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
             this.countDownLatch = countDownLatch;
             this.hbaseAdmin = hbaseAdmin;
             this.hdfsCoprocessorJar = hdfsCoprocessorJar;
@@ -425,7 +424,7 @@ public class DeployCoprocessorCLI {
         return coprocessorDir;
     }
 
-    private static Set<String> getCoprocessorJarPaths(Admin hbaseAdmin, List<String> tableNames) throws IOException {
+    private static Set<String> getCoprocessorJarPaths(HBaseAdmin hbaseAdmin, List<String> tableNames) throws IOException {
         HashSet<String> result = new HashSet<String>();
 
         for (String tableName : tableNames) {

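resetCoprocessor() follows the same offline-edit discipline while swapping coprocessor registrations on the table descriptor. A minimal sketch of that swap with a placeholder class name and jar path (the real method also strips several legacy observer classes and stamps Kylin version tags):

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    class SwapCoprocessorSketch {
        static void swap(HBaseAdmin admin, String tableName, String cpClass, Path cpJar) throws IOException {
            HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf(tableName));
            admin.disableTable(tableName);
            while (desc.hasCoprocessor(cpClass))   // drop stale registrations of the same class
                desc.removeCoprocessor(cpClass);
            desc.addCoprocessor(cpClass, cpJar, 1001, null); // same priority DeployCoprocessorCLI uses
            admin.modifyTable(tableName, desc);
            admin.enableTable(tableName);
        }
    }
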
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 1cdb2f8..61c73d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -236,9 +235,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -258,6 +257,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index dd5f8fa..86ba22f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public class GridTableHBaseBenchmark {
         System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
         String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
 
-        Connection conn = HBaseConnection.get(hbaseUrl);
+        HConnection conn = HBaseConnection.get(hbaseUrl);
         createHTableIfNeeded(conn, TEST_TABLE);
         prepareData(conn);
 
@@ -91,10 +91,10 @@ public class GridTableHBaseBenchmark {
 
     }
 
-    private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+    private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
         Stats stats = new Stats("COLUMN_SCAN");
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -122,20 +122,20 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
         fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
     }
 
-    private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
     }
 
-    private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
     }
 
-    private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -156,11 +156,11 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+    private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
 
         final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
 
             stats.markStart();
@@ -204,8 +204,8 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void prepareData(Connection conn) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void prepareData(HConnection conn) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
 
         try {
             // check how many rows existing
@@ -258,8 +258,8 @@ public class GridTableHBaseBenchmark {
         return bytes;
     }
 
-    private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             boolean tableExist = false;

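The benchmark now runs against HConnection, which hands out HTableInterface instances by String name; an HBaseAdmin can be built on top of the same connection, since there is no conn.getAdmin() in 0.98. Roughly (the table name is a placeholder):

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;

    class ConnectionIdiomSketch {
        public static void main(String[] args) throws IOException {
            HConnection conn = HConnectionManager.createConnection(HBaseConfiguration.create());
            HBaseAdmin admin = new HBaseAdmin(conn); // shares the connection; no conn.getAdmin() in 0.98
            try {
                System.out.println("exists: " + admin.tableExists("KYLIN_TEST")); // hypothetical table
                HTableInterface table = conn.getTable("KYLIN_TEST");
                table.close();
            } finally {
                admin.close();
                conn.close(); // the caller owns connections from createConnection()
            }
        }
    }
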
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 940d64a..6749d6c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,11 +24,9 @@ import java.util.List;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -57,8 +55,8 @@ public class HBaseClean extends AbstractApplication {
     private void cleanUp() {
         try {
             // get all kylin hbase tables
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            Admin hbaseAdmin = conn.getAdmin();
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
             String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
             HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
             List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -73,12 +71,12 @@ public class HBaseClean extends AbstractApplication {
                 // drop tables
                 for (String htableName : allTablesNeedToBeDropped) {
                     logger.info("Deleting HBase table " + htableName);
-                    if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                        if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                            hbaseAdmin.disableTable(TableName.valueOf(htableName));
+                    if (hbaseAdmin.tableExists(htableName)) {
+                        if (hbaseAdmin.isTableEnabled(htableName)) {
+                            hbaseAdmin.disableTable(htableName);
                         }
 
-                        hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                        hbaseAdmin.deleteTable(htableName);
                         logger.info("Deleted HBase table " + htableName);
                     } else {
                         logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 1daca0a..937b65f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
@@ -32,15 +31,12 @@ import java.util.TreeSet;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
@@ -62,31 +58,30 @@ public class HBaseRegionSizeCalculator {
     /**
      * Computes size of each region for table and given column families.
      * */
-    public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
+    public HBaseRegionSizeCalculator(HTable table) throws IOException {
+        this(table, new HBaseAdmin(table.getConfiguration()));
+    }
 
-        Table table = null;
-        Admin admin = null;
-        try {
-            table = hbaseConnection.getTable(TableName.valueOf(tableName));
-            admin = hbaseConnection.getAdmin();
+    /** Constructor for unit testing */
+    HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
 
+        try {
             if (!enabled(table.getConfiguration())) {
                 logger.info("Region size calculation disabled.");
                 return;
             }
 
-            logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
+            logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
 
             // Get regions for table.
-            RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
-            List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
+            Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
             Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
 
-            for (HRegionLocation hRegionLocation : regionLocationList) {
-                tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
+            for (HRegionInfo regionInfo : tableRegionInfos) {
+                tableRegions.add(regionInfo.getRegionName());
             }
 
-            ClusterStatus clusterStatus = admin.getClusterStatus();
+            ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
             Collection<ServerName> servers = clusterStatus.getServers();
             final long megaByte = 1024L * 1024L;
 
@@ -110,7 +105,7 @@ public class HBaseRegionSizeCalculator {
                 }
             }
         } finally {
-            IOUtils.closeQuietly(admin);
+            IOUtils.closeQuietly(hBaseAdmin);
         }
 
     }

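The rewritten constructor enumerates regions with HTable.getRegionLocations(), since 0.98 has no RegionLocator, and then reads sizes from the cluster status, whose per-server region loads are keyed by region name bytes. The core of that walk, reduced to a sketch that just prints sizes:

    import java.io.IOException;

    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.RegionLoad;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;

    class RegionSizeSketch {
        static void printRegionSizes(HTable table, HBaseAdmin admin) throws IOException {
            ClusterStatus status = admin.getClusterStatus();
            for (HRegionInfo region : table.getRegionLocations().keySet()) { // 0.98: straight off HTable
                for (ServerName server : status.getServers()) {
                    RegionLoad load = status.getLoad(server).getRegionsLoad().get(region.getRegionName());
                    if (load != null) // this server hosts the region
                        System.out.println(region.getRegionNameAsString() + ": " + load.getStorefileSizeMB() + " MB");
                }
            }
        }
    }
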
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index a2f60d4..266f7e7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,10 +23,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 
@@ -43,8 +42,8 @@ public class HBaseUsage {
         Map<String, List<String>> envs = Maps.newHashMap();
 
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         for (HTableDescriptor desc : tableDescriptors) {

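listTables(String) takes a regular expression in both API generations, so only the admin construction changes here. The enumeration boils down to the sketch below (class and method names are illustrative):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    class ListTablesSketch {
        static void listByPrefix(Configuration conf, String prefix) throws IOException {
            HBaseAdmin admin = new HBaseAdmin(conf);
            try {
                for (HTableDescriptor desc : admin.listTables(prefix + ".*")) // regex match on table names
                    System.out.println(desc.getNameAsString());
            } finally {
                admin.close();
            }
        }
    }
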
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164..1db60fb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -32,15 +32,15 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -58,11 +58,11 @@ public class HbaseStreamingInput {
     private static final byte[] QN = "C".getBytes();
 
     public static void createTable(String tableName) throws IOException {
-        Connection conn = getConnection();
-        Admin hadmin = conn.getAdmin();
+        HConnection conn = getConnection();
+        HBaseAdmin hadmin = new HBaseAdmin(conn);
 
         try {
-            boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
+            boolean tableExist = hadmin.tableExists(tableName);
             if (tableExist) {
                 logger.info("HTable '" + tableName + "' already exists");
                 return;
@@ -120,8 +120,8 @@ public class HbaseStreamingInput {
                 e.printStackTrace();
             }
 
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             byte[] key = new byte[8 + 4];//time + id
 
@@ -136,7 +136,7 @@ public class HbaseStreamingInput {
                 Bytes.putInt(key, 8, i);
                 Put put = new Put(key);
                 byte[] cell = randomBytes(CELL_SIZE);
-                put.addColumn(CF, QN, cell);
+                put.add(CF, QN, cell);
                 buffer.add(put);
             }
             table.put(buffer);
@@ -172,8 +172,8 @@ public class HbaseStreamingInput {
             }
 
             Random r = new Random();
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             long leftBound = getFirstKeyTime(table);
             long rightBound = System.currentTimeMillis();
@@ -208,7 +208,7 @@ public class HbaseStreamingInput {
         }
     }
 
-    private static long getFirstKeyTime(Table table) throws IOException {
+    private static long getFirstKeyTime(HTableInterface table) throws IOException {
         long startTime = 0;
 
         Scan scan = new Scan();
@@ -226,8 +226,8 @@ public class HbaseStreamingInput {
 
     }
 
-    private static Connection getConnection() throws IOException {
-        return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+    private static HConnection getConnection() throws IOException {
+        return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
     }
 
     private static String formatTime(long time) {

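Besides the connection plumbing, note the cell write: Put.add(family, qualifier, value) is the 0.98 spelling of the 1.x Put.addColumn(...). A stripped-down version of the producer's write path; the family and qualifier bytes here are placeholders, not necessarily the CF/QN constants above:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.kylin.storage.hbase.HBaseConnection;

    class StreamingPutSketch {
        static void writeOne(String tableName, byte[] key, byte[] cell) throws IOException {
            HConnection conn = HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
            HTableInterface table = conn.getTable(tableName);
            try {
                Put put = new Put(key);
                put.add("F".getBytes(), "C".getBytes(), cell); // 0.98 API; addColumn(...) in 1.x
                table.put(put);
                table.flushCommits(); // single puts are buffered too
            } finally {
                table.close();
                conn.close();
            }
        }
    }
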
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ea05ab2..ca1a060 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,11 +23,10 @@ import java.io.IOException;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -51,8 +50,8 @@ public class HtableAlterMetadataCLI extends AbstractApplication {
     String metadataValue;
 
     private void alter() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
         hbaseAdmin.disableTable(table.getTableName());

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index df4e912..8ff5b0f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,14 +30,10 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,9 +52,9 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
     Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+
         // get all kylin hbase tables
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -77,13 +73,12 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
             // drop tables
             for (String htableName : allTablesNeedToBeDropped) {
                 logger.info("Deleting HBase table " + htableName);
-                TableName tableName = TableName.valueOf(htableName);
-                if (hbaseAdmin.tableExists(tableName)) {
-                    if (hbaseAdmin.isTableEnabled(tableName)) {
-                        hbaseAdmin.disableTable(tableName);
+                if (hbaseAdmin.tableExists(htableName)) {
+                    if (hbaseAdmin.isTableEnabled(htableName)) {
+                        hbaseAdmin.disableTable(htableName);
                     }
 
-                    hbaseAdmin.deleteTable(tableName);
+                    hbaseAdmin.deleteTable(htableName);
                     logger.info("Deleted HBase table " + htableName);
                 } else {
                     logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index bba6745..1ea8e8d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -22,13 +22,12 @@ import java.io.IOException;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -60,12 +59,12 @@ public class PingHBaseCLI {
         Scan scan = new Scan();
         int limit = 20;
 
-        Connection conn = null;
-        Table table = null;
+        HConnection conn = null;
+        HTableInterface table = null;
         ResultScanner scanner = null;
         try {
-            conn = ConnectionFactory.createConnection(hconf);
-            table = conn.getTable(TableName.valueOf(hbaseTable));
+            conn = HConnectionManager.createConnection(hconf);
+            table = conn.getTable(hbaseTable);
             scanner = table.getScanner(scan);
             int count = 0;
             for (Result r : scanner) {

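The ping itself is a bounded scan over the restored HConnection/HTableInterface pair; connections from HConnectionManager.createConnection are owned by the caller and must be closed, which is why the CLI closes all three resources. In outline (the limit mirrors the CLI's limit of 20):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;

    class PingScanSketch {
        static int countRows(Configuration hconf, String hbaseTable, int limit) throws IOException {
            HConnection conn = HConnectionManager.createConnection(hconf);
            HTableInterface table = conn.getTable(hbaseTable);
            ResultScanner scanner = table.getScanner(new Scan());
            int count = 0;
            try {
                for (Result r : scanner) {
                    if (++count >= limit) // stop once enough rows prove the table is reachable
                        break;
                }
            } finally {
                scanner.close();
                table.close();
                conn.close();
            }
            return count;
        }
    }
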
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index db516bb..01edb1f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,12 +22,11 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -71,8 +70,8 @@ public class RowCounterCLI {
 
         logger.info("My Scan " + scan.toString());
 
-        Connection conn = ConnectionFactory.createConnection(conf);
-        Table tableInterface = conn.getTable(TableName.valueOf(htableName));
+        HConnection conn = HConnectionManager.createConnection(conf);
+        HTableInterface tableInterface = conn.getTable(htableName);
 
         Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
         int counter = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index f6b65ab..23e7e10 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,9 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -59,7 +57,6 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,8 +77,7 @@ public class StorageCleanupJob extends AbstractApplication {
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -157,22 +153,22 @@ public class StorageCleanupJob extends AbstractApplication {
     }
 
     class DeleteHTableRunnable implements Callable {
-        Admin hbaseAdmin;
+        HBaseAdmin hbaseAdmin;
         String htableName;
 
-        DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
+        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
             this.hbaseAdmin = hbaseAdmin;
             this.htableName = htableName;
         }
 
         public Object call() throws Exception {
             logger.info("Deleting HBase table " + htableName);
-            if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                    hbaseAdmin.disableTable(TableName.valueOf(htableName));
+            if (hbaseAdmin.tableExists(htableName)) {
+                if (hbaseAdmin.isTableEnabled(htableName)) {
+                    hbaseAdmin.disableTable(htableName);
                 }
 
-                hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                hbaseAdmin.deleteTable(htableName);
                 logger.info("Deleted HBase table " + htableName);
             } else {
                 logger.info("HBase table" + htableName + " does not exist");

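DeleteHTableRunnable is a Callable so each drop can be bounded by a timeout when submitted to an executor; with the 0.98 admin its body shrinks to the String-based exists/disable/delete sequence. A sketch of driving one such drop with a timeout (the executor wiring here is illustrative, not Kylin's exact code):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    import org.apache.hadoop.hbase.client.HBaseAdmin;

    class TimedDropSketch {
        static void dropWithTimeout(final HBaseAdmin admin, final String htable, long seconds) throws Exception {
            ExecutorService pool = Executors.newSingleThreadExecutor();
            Future<Object> f = pool.submit(new Callable<Object>() {
                public Object call() throws Exception {
                    if (admin.tableExists(htable)) {
                        if (admin.isTableEnabled(htable))
                            admin.disableTable(htable);
                        admin.deleteTable(htable);
                    }
                    return null;
                }
            });
            try {
                f.get(seconds, TimeUnit.SECONDS); // bound the drop instead of hanging forever
            } catch (TimeoutException e) {
                f.cancel(true); // give up on a stuck drop
            } finally {
                pool.shutdownNow();
            }
        }
    }
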
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8..e36f662 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,15 +49,14 @@ public class UpdateHTableHostCLI {
     private List<String> errorMsgs = Lists.newArrayList();
 
     private List<String> htables;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
     private KylinConfig kylinConfig;
     private String oldHostValue;
 
     public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
         this.htables = htables;
         this.oldHostValue = oldHostValue;
-        Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
-        hbaseAdmin = conn.getAdmin();
+        this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
         this.kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
@@ -169,9 +166,9 @@ public class UpdateHTableHostCLI {
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
         if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
             desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
-            hbaseAdmin.disableTable(TableName.valueOf(tableName));
-            hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
-            hbaseAdmin.enableTable(TableName.valueOf(tableName));
+            hbaseAdmin.disableTable(tableName);
+            hbaseAdmin.modifyTable(tableName, desc);
+            hbaseAdmin.enableTable(tableName);
 
             updatedResources.add(tableName);
         }

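The hunk above keeps the classic alter-table choreography: with default settings, HBase 0.98 applies descriptor changes to a disabled table, hence the disable/modify/enable cycle. A condensed sketch of that cycle (table name and tag value are hypothetical, not from the commit):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class RetagHTableSketch {
        public static void main(String[] args) throws Exception {
            HBaseAdmin admin = new HBaseAdmin(HBaseConfiguration.create());
            try {
                String table = "KYLIN_EXAMPLE";                          // hypothetical
                HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf(table));
                desc.setValue("KYLIN_HOST", "kylin_metadata@hbase");     // hypothetical tag/value
                admin.disableTable(table);    // descriptor edits expect a disabled table
                admin.modifyTable(table, desc);
                admin.enableTable(table);
            } finally {
                admin.close();
            }
        }
    }
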
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/tool/pom.xml
----------------------------------------------------------------------
diff --git a/tool/pom.xml b/tool/pom.xml
index 278c2b8..072ee44 100644
--- a/tool/pom.xml
+++ b/tool/pom.xml
@@ -60,16 +60,6 @@
             <artifactId>hbase-client</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
 
         <!-- Env & Test -->
         <dependency>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index c0042f3..c8bff89 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -231,7 +231,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -448,11 +447,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -472,6 +471,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -537,12 +537,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -559,7 +560,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);

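One behavioral difference worth noting in this revert: 0.98's HTableInterface can buffer puts on the client side, so the commit re-adds flushCommits() before the tables are closed; on the 1.x API that buffering lives in BufferedMutator, and Table needs no explicit flush. A minimal sketch of the pattern (connection, table name, and row content are hypothetical):

    import java.io.IOException;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AclWriteSketch {
        // conn is assumed to be an already-open 0.98 HConnection
        static void copyOneAclRow(HConnection conn) throws IOException {
            HTableInterface acl = null;
            try {
                acl = conn.getTable("kylin_metadata_acl");          // hypothetical table
                Put put = new Put(Bytes.toBytes("cube-uuid"));      // hypothetical row key
                put.add(Bytes.toBytes("i"), Bytes.toBytes("v"), Bytes.toBytes("acl-json"));
                acl.put(put);
                acl.flushCommits();   // drain any client-side write buffer before close
            } finally {
                IOUtils.closeQuietly(acl);
            }
        }
    }
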
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index f52fc3e..19e5db0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -232,9 +231,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -254,6 +253,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }


[05/11] kylin git commit: minor, move utf8Length to class StringUtil

Posted by li...@apache.org.
minor, move utf8Length to class StringUtil

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/c52b6981
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/c52b6981
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/c52b6981

Branch: refs/heads/master-hbase0.98
Commit: c52b6981f9404a03dfa988eec67fa2dd23b99d94
Parents: bf81340
Author: Cheng Wang <ch...@kyligence.io>
Authored: Mon Feb 20 16:57:04 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Mon Feb 20 17:00:34 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/util/StringUtil.java    | 19 +++++++++++++
 .../mr/steps/FactDistinctColumnsMapper.java     | 30 +++++---------------
 2 files changed, 26 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/c52b6981/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
index 96d294b..964bf0d 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/StringUtil.java
@@ -164,4 +164,23 @@ public class StringUtil {
         return r.toArray(new String[r.size()]);
     }
 
+    // calculating length in UTF-8 of Java String without actually encoding it
+    public static int utf8Length(CharSequence sequence) {
+        int count = 0;
+        for (int i = 0, len = sequence.length(); i < len; i++) {
+            char ch = sequence.charAt(i);
+            if (ch <= 0x7F) {
+                count++;
+            } else if (ch <= 0x7FF) {
+                count += 2;
+            } else if (Character.isHighSurrogate(ch)) {
+                count += 4;
+                ++i;
+            } else {
+                count += 3;
+            }
+        }
+        return count;
+    }
+
 }

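A quick worked check of the moved method (it matches what s.getBytes(StandardCharsets.UTF_8).length would return, without allocating the byte array):

    import org.apache.kylin.common.util.StringUtil;

    public class Utf8LengthCheck {
        public static void main(String[] args) {
            System.out.println(StringUtil.utf8Length("abc"));          // 3  (ASCII: 1 byte each)
            System.out.println(StringUtil.utf8Length("café"));         // 5  (é takes 2 bytes)
            System.out.println(StringUtil.utf8Length("中"));            // 3  (BMP CJK: 3 bytes)
            System.out.println(StringUtil.utf8Length("\uD83D\uDE00")); // 4  (surrogate pair)
        }
    }

Note the high-surrogate branch counts 4 and skips the low surrogate, so a well-formed pair is counted once.
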
http://git-wip-us.apache.org/repos/asf/kylin/blob/c52b6981/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
index 9d0ff10..d9c1309 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsMapper.java
@@ -26,6 +26,7 @@ import java.util.List;
 import org.apache.hadoop.io.Text;
 import org.apache.kylin.common.util.ByteArray;
 import org.apache.kylin.common.util.Bytes;
+import org.apache.kylin.common.util.StringUtil;
 import org.apache.kylin.cube.cuboid.CuboidScheduler;
 import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.measure.BufferedMeasureCodec;
@@ -44,8 +45,10 @@ import com.google.common.hash.Hashing;
 public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperBase<KEYIN, Object> {
 
     private static final Logger logger = LoggerFactory.getLogger(FactDistinctColumnsMapper.class);
-    
-    public static enum RawDataCounter { BYTES };
+
+    public static enum RawDataCounter {
+        BYTES
+    };
 
     protected boolean collectStatistics = false;
     protected CuboidScheduler cuboidScheduler = null;
@@ -132,7 +135,7 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
     @Override
     public void doMap(KEYIN key, Object record, Context context) throws IOException, InterruptedException {
         String[] row = flatTableInputFormat.parseMapperInput(record);
-        
+
         context.getCounter(RawDataCounter.BYTES).increment(countSizeInBytes(row));
 
         for (int i = 0; i < factDictCols.size(); i++) {
@@ -188,30 +191,11 @@ public class FactDistinctColumnsMapper<KEYIN> extends FactDistinctColumnsMapperB
     private long countSizeInBytes(String[] row) {
         int size = 0;
         for (String s : row) {
-            size += s == null ? 1 : utf8Length(s);
+            size += s == null ? 1 : StringUtil.utf8Length(s);
             size++; // delimiter
         }
         return size;
     }
-    
-    // calculating length in UTF-8 of Java String without actually encoding it
-    public static int utf8Length(CharSequence sequence) {
-        int count = 0;
-        for (int i = 0, len = sequence.length(); i < len; i++) {
-            char ch = sequence.charAt(i);
-            if (ch <= 0x7F) {
-                count++;
-            } else if (ch <= 0x7FF) {
-                count += 2;
-            } else if (Character.isHighSurrogate(ch)) {
-                count += 4;
-                ++i;
-            } else {
-                count += 3;
-            }
-        }
-        return count;
-    }
 
     private void putRowKeyToHLL(String[] row) {
 


[08/11] kylin git commit: KYLIN-2428 Revert SparkEntry to core-common

Posted by li...@apache.org.
KYLIN-2428 Revert SparkEntry to core-common


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e05d8f3d
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e05d8f3d
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e05d8f3d

Branch: refs/heads/master-hbase0.98
Commit: e05d8f3d87e2e7d0acf3bf47daa4e4bbd2d8d75f
Parents: ed413ee
Author: Billy Liu <bi...@apache.org>
Authored: Mon Feb 20 22:26:35 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Mon Feb 20 22:26:35 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/util/SparkEntry.java    | 46 +++++++++++++++++++
 .../kylin/engine/spark/SparkExecutable.java     |  2 +-
 .../kylin/engine/spark/util/SparkEntry.java     | 48 --------------------
 3 files changed, 47 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e05d8f3d/core-common/src/main/java/org/apache/kylin/common/util/SparkEntry.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/SparkEntry.java b/core-common/src/main/java/org/apache/kylin/common/util/SparkEntry.java
new file mode 100644
index 0000000..ce11b91
--- /dev/null
+++ b/core-common/src/main/java/org/apache/kylin/common/util/SparkEntry.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *  
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.common.util;
+
+import org.apache.commons.lang.StringUtils;
+
+/**
+ */
+public final class SparkEntry {
+
+    public static void main(String[] args) throws Exception {
+        System.out.println("SparkEntry args:" + StringUtils.join(args, " "));
+        if (!(args.length >= 2)) {
+            throw new IllegalArgumentException(String.valueOf("-className is required"));
+        }
+        if (!(args[0].equals("-className"))) {
+            throw new IllegalArgumentException(String.valueOf("-className is required"));
+        }
+        final String className = args[1];
+        final Object o = Class.<AbstractApplication> forName(className).newInstance();
+        if (!(o instanceof AbstractApplication)) {
+            throw new IllegalArgumentException(String.valueOf(className + " is not a subClass of AbstractApplication"));
+        }
+        String[] appArgs = new String[args.length - 2];
+        for (int i = 2; i < args.length; i++) {
+            appArgs[i - 2] = args[i];
+        }
+        AbstractApplication abstractApplication = (AbstractApplication) o;
+        abstractApplication.execute(appArgs);
+    }
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/e05d8f3d/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
index 015b480..c671a91 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkExecutable.java
@@ -105,7 +105,7 @@ public class SparkExecutable extends AbstractExecutable {
         }
 
         StringBuilder stringBuilder = new StringBuilder();
-        stringBuilder.append("export HADOOP_CONF_DIR=%s && %s/bin/spark-submit --class org.apache.kylin.engine.spark.util.SparkEntry ");
+        stringBuilder.append("export HADOOP_CONF_DIR=%s && %s/bin/spark-submit --class org.apache.kylin.common.util.SparkEntry ");
 
         Map<String, String> sparkConfs = config.getSparkConfigOverride();
         for (Map.Entry<String, String> entry : sparkConfs.entrySet()) {

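Putting the two pieces together, the driver ends up being launched roughly as

    spark-submit --class org.apache.kylin.common.util.SparkEntry <kylin job jar> -className <application class> <application args>

and SparkEntry strips the leading -className pair before handing the remaining arguments to the application. An equivalent direct call, with a hypothetical application class and arguments, would look like:

    // Sketch only; the application class and its arguments are hypothetical.
    SparkEntry.main(new String[] { "-className", "org.apache.kylin.engine.spark.SparkCubing", "-input", "/tmp/flat_table" });
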
http://git-wip-us.apache.org/repos/asf/kylin/blob/e05d8f3d/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/SparkEntry.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/SparkEntry.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/SparkEntry.java
deleted file mode 100644
index 14788fc..0000000
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/util/SparkEntry.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *  
- *     http://www.apache.org/licenses/LICENSE-2.0
- *  
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kylin.engine.spark.util;
-
-import org.apache.commons.lang.StringUtils;
-
-import org.apache.kylin.common.util.AbstractApplication;
-
-/**
- */
-public final class SparkEntry {
-
-    public static void main(String[] args) throws Exception {
-        System.out.println("SparkEntry args:" + StringUtils.join(args, " "));
-        if (!(args.length >= 2)) {
-            throw new IllegalArgumentException(String.valueOf("-className is required"));
-        }
-        if (!(args[0].equals("-className"))) {
-            throw new IllegalArgumentException(String.valueOf("-className is required"));
-        }
-        final String className = args[1];
-        final Object o = Class.<AbstractApplication> forName(className).newInstance();
-        if (!(o instanceof AbstractApplication)) {
-            throw new IllegalArgumentException(String.valueOf(className + " is not a subClass of AbstractSparkApplication"));
-        }
-        String[] appArgs = new String[args.length - 2];
-        for (int i = 2; i < args.length; i++) {
-            appArgs[i - 2] = args[i];
-        }
-        AbstractApplication abstractApplication = (AbstractApplication) o;
-        abstractApplication.execute(appArgs);
-    }
-}


[03/11] kylin git commit: KYLIN-2452 validate there is at least one AggregationGroup

Posted by li...@apache.org.
KYLIN-2452 validate there is at least one AggregationGroup

Signed-off-by: shaofengshi <sh...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/42d09a79
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/42d09a79
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/42d09a79

Branch: refs/heads/master-hbase0.98
Commit: 42d09a79848f2becf57deab8742b896d017791b3
Parents: bbcb0ed
Author: Cheng Wang <ch...@kyligence.io>
Authored: Fri Feb 17 10:05:28 2017 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Feb 20 15:20:31 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/model/CubeDesc.java     | 18 ++++++++++--------
 .../kylin/rest/controller/CubeController.java     | 13 +++++++++----
 2 files changed, 19 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/42d09a79/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
index 0c1419d..0f804d1 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/model/CubeDesc.java
@@ -440,6 +440,8 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
     }
 
     public int getBuildLevel() {
+        if (aggregationGroups.size() <= 0)
+            throw new IllegalStateException("AggregationGroup size is: " + aggregationGroups.size() + ", there should be at least one AggregationGroup!");
         return Collections.max(Collections2.transform(aggregationGroups, new Function<AggregationGroup, Integer>() {
             @Nullable
             @Override
@@ -554,7 +556,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
         initMeasureColumns();
 
         rowkey.init(this);
-        
+
         validateAggregationGroups(); // check if aggregation group is valid
         for (AggregationGroup agg : this.aggregationGroups) {
             agg.init(this, rowkey);
@@ -752,13 +754,13 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 }
                 initDerivedMap(dimColArray, DeriveType.LOOKUP, join, derivedCols, derivedExtra);
             }
-            
+
             if (join != null) {
                 allColumns.addAll(Arrays.asList(join.getForeignKeyColumns()));
                 allColumns.addAll(Arrays.asList(join.getPrimaryKeyColumns()));
             }
         }
-        
+
         // PK-FK derive the other side
         Set<TblColRef> realDimensions = new HashSet<>(listDimensionColumnsExcludingDerived(true));
         for (JoinTableDesc joinTable : model.getJoinTables()) {
@@ -777,7 +779,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
             }
         }
     }
-    
+
     private String[][] splitDerivedColumnAndExtra(String[] derived) {
         String[] cols = new String[derived.length];
         String[] extra = new String[derived.length];
@@ -810,7 +812,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 i--;
             }
         }
-        
+
         if (derivedCols.length == 0)
             return;
 
@@ -826,7 +828,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
             infoList = new ArrayList<DeriveInfo>();
             hostToDerivedMap.put(hostColArray, infoList);
         }
-        
+
         // Merged duplicated derived column
         List<TblColRef> whatsLeft = new ArrayList<>();
         for (TblColRef derCol : derivedCols) {
@@ -966,7 +968,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
                 allColumns.addAll(Arrays.asList(join.getPrimaryKeyColumns()));
             }
         }
-        
+
         for (TblColRef col : allColumns) {
             allColumnDescs.add(col.getColumnDesc());
         }
@@ -1142,7 +1144,7 @@ public class CubeDesc extends RootPersistentEntity implements IEngineAware {
 
         return null;
     }
-    
+
     /** Get a column which can be used to cluster the source table.
      * To reduce memory footprint in base cuboid for global dict */
     // TODO handle more than one ultra high cardinality columns use global dict in one cube

http://git-wip-us.apache.org/repos/asf/kylin/blob/42d09a79/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index f905fe1..fb4be5a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -327,10 +327,10 @@ public class CubeController extends BasicController {
                 throw new InternalErrorException("Cannot find cube '" + cubeName + "'");
             }
 
-//            if (cube.getSegments() != null && cube.getBuildingSegments().size() > 0) {
-//                int num = cube.getBuildingSegments().size();
-//                throw new InternalErrorException("Cannot purge cube '" + cubeName + "' as there is " + num + " building " + (num > 1 ? "segment(s)." : "segment. Discard the related job first."));
-//            }
+            //            if (cube.getSegments() != null && cube.getBuildingSegments().size() > 0) {
+            //                int num = cube.getBuildingSegments().size();
+            //                throw new InternalErrorException("Cannot purge cube '" + cubeName + "' as there is " + num + " building " + (num > 1 ? "segment(s)." : "segment. Discard the related job first."));
+            //            }
 
             return cubeService.purgeCube(cube);
         } catch (Exception e) {
@@ -426,6 +426,7 @@ public class CubeController extends BasicController {
     public CubeRequest saveCubeDesc(@RequestBody CubeRequest cubeRequest) {
 
         CubeDesc desc = deserializeCubeDesc(cubeRequest);
+
         if (desc == null) {
             cubeRequest.setMessage("CubeDesc is null.");
             return cubeRequest;
@@ -440,6 +441,10 @@ public class CubeController extends BasicController {
             throw new BadRequestException("Invalid Cube name, only letters, numbers and underline supported.");
         }
 
+        int aggSize = desc.getAggregationGroups().size();
+        if (aggSize <= 0)
+            throw new IllegalStateException("AggregationGroup size is: " + aggSize + ", there should be at least one AggregationGroup!");
+
         try {
             desc.setUuid(UUID.randomUUID().toString());
             String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME : cubeRequest.getProject();

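Condensed, both hunks enforce the same fail-fast invariant (a sketch, not the literal patch; AggregationGroup is org.apache.kylin.cube.model.AggregationGroup):

    java.util.List<AggregationGroup> groups = desc.getAggregationGroups();
    if (groups == null || groups.isEmpty())
        throw new IllegalStateException("there should be at least one AggregationGroup!");

Checking at save time in CubeController gives the user an early, readable error, while the guard in getBuildLevel() protects Collections.max(), which would otherwise throw a bare NoSuchElementException on an empty collection.
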

[11/11] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by li...@apache.org.
KYLIN-2307 Create a branch for master with HBase 0.98 API


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f5432f21
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f5432f21
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f5432f21

Branch: refs/heads/master-hbase0.98
Commit: f5432f2142b24fc629fefb633df72351d3a10635
Parents: 5d2e6c1
Author: lidongsjtu <li...@apache.org>
Authored: Mon Jan 23 13:17:37 2017 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Tue Feb 21 00:49:22 2017 +0000

----------------------------------------------------------------------
 dev-support/test_all_against_hdp_2_2_4_2_2.sh   |  25 ++++
 dev-support/test_all_against_hdp_2_4_0_0_169.sh |  25 ----
 .../sandbox/capacity-scheduler.xml              |  17 ++-
 examples/test_case_data/sandbox/core-site.xml   |  28 +---
 examples/test_case_data/sandbox/hbase-site.xml  | 119 +++++------------
 examples/test_case_data/sandbox/hdfs-site.xml   |  84 +++++-------
 examples/test_case_data/sandbox/hive-site.xml   |  89 +++++--------
 examples/test_case_data/sandbox/mapred-site.xml |  57 +++------
 examples/test_case_data/sandbox/yarn-site.xml   | 127 +++----------------
 .../kylin/provision/BuildCubeWithEngine.java    |  17 +--
 pom.xml                                         | 117 +----------------
 .../kylin/rest/security/AclHBaseStorage.java    |   4 +-
 .../rest/security/MockAclHBaseStorage.java      |   8 +-
 .../apache/kylin/rest/security/MockHTable.java  |  95 +++++++++++---
 .../rest/security/RealAclHBaseStorage.java      |   9 +-
 .../apache/kylin/rest/service/AclService.java   |  25 ++--
 .../apache/kylin/rest/service/CubeService.java  |  35 +++--
 .../apache/kylin/rest/service/QueryService.java |  24 ++--
 .../apache/kylin/rest/service/UserService.java  |  17 +--
 .../kylin/storage/hbase/HBaseConnection.java    |  44 +++----
 .../kylin/storage/hbase/HBaseResourceStore.java |  31 +++--
 .../storage/hbase/cube/SimpleHBaseStore.java    |  20 +--
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |  13 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   9 +-
 .../coprocessor/endpoint/CubeVisitService.java  |   4 +-
 .../storage/hbase/steps/CubeHTableUtil.java     |  16 +--
 .../storage/hbase/steps/DeprecatedGCStep.java   |  24 ++--
 .../storage/hbase/steps/HBaseCuboidWriter.java  |   7 +-
 .../kylin/storage/hbase/steps/MergeGCStep.java  |  23 ++--
 .../storage/hbase/util/CleanHtableCLI.java      |  12 +-
 .../storage/hbase/util/CubeMigrationCLI.java    |  37 +++---
 .../hbase/util/CubeMigrationCheckCLI.java       |  17 +--
 .../hbase/util/DeployCoprocessorCLI.java        |  27 ++--
 .../hbase/util/ExtendCubeToHybridCLI.java       |   8 +-
 .../hbase/util/GridTableHBaseBenchmark.java     |  34 ++---
 .../kylin/storage/hbase/util/HBaseClean.java    |  18 ++-
 .../hbase/util/HBaseRegionSizeCalculator.java   |  35 +++--
 .../kylin/storage/hbase/util/HBaseUsage.java    |   9 +-
 .../storage/hbase/util/HbaseStreamingInput.java |  30 ++---
 .../hbase/util/HtableAlterMetadataCLI.java      |   9 +-
 .../storage/hbase/util/OrphanHBaseCleanJob.java |  19 +--
 .../kylin/storage/hbase/util/PingHBaseCLI.java  |  15 +--
 .../kylin/storage/hbase/util/RowCounterCLI.java |  11 +-
 .../storage/hbase/util/StorageCleanupJob.java   |  20 ++-
 .../storage/hbase/util/UpdateHTableHostCLI.java |  17 +--
 tool/pom.xml                                    |  10 --
 .../org/apache/kylin/tool/CubeMigrationCLI.java |  19 +--
 .../kylin/tool/ExtendCubeToHybridCLI.java       |   8 +-
 48 files changed, 596 insertions(+), 872 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/dev-support/test_all_against_hdp_2_2_4_2_2.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_2_4_2_2.sh b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
new file mode 100755
index 0000000..f7780dd
--- /dev/null
+++ b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+dir=$(dirname ${0})
+cd ${dir}
+cd ..
+
+mvn clean install -DskipTests 2>&1 | tee mci.log
+mvn verify -Dhdp.version=${HDP_VERSION:-"2.2.4.2-2"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/dev-support/test_all_against_hdp_2_4_0_0_169.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_4_0_0_169.sh b/dev-support/test_all_against_hdp_2_4_0_0_169.sh
deleted file mode 100755
index 2a3d24b..0000000
--- a/dev-support/test_all_against_hdp_2_4_0_0_169.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-dir=$(dirname ${0})
-cd ${dir}
-cd ..
-
-mvn clean install -DskipTests 2>&1 | tee mci.log
-mvn verify -Dhdp.version=${HDP_VERSION:-"2.4.0.0-169"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/capacity-scheduler.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/capacity-scheduler.xml b/examples/test_case_data/sandbox/capacity-scheduler.xml
index e042aa5..7cb985c 100644
--- a/examples/test_case_data/sandbox/capacity-scheduler.xml
+++ b/examples/test_case_data/sandbox/capacity-scheduler.xml
@@ -47,6 +47,16 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.acl_administer_queue</name>
         <value>*</value>
     </property>
@@ -57,6 +67,11 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.default-node-label-expression</name>
+        <value></value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.default.acl_administer_jobs</name>
         <value>*</value>
     </property>
@@ -96,4 +111,4 @@
         <value>default</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/core-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index a4ad5c6..0c5f62b 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -19,6 +19,7 @@
     <property>
         <name>fs.defaultFS</name>
         <value>hdfs://sandbox.hortonworks.com:8020</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -38,7 +39,7 @@
 
     <property>
         <name>hadoop.proxyuser.falcon.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -48,7 +49,7 @@
 
     <property>
         <name>hadoop.proxyuser.hbase.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -67,23 +68,13 @@
     </property>
 
     <property>
-        <name>hadoop.proxyuser.hdfs.groups</name>
-        <value>*</value>
-    </property>
-
-    <property>
-        <name>hadoop.proxyuser.hdfs.hosts</name>
-        <value>*</value>
-    </property>
-
-    <property>
         <name>hadoop.proxyuser.hive.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
         <name>hadoop.proxyuser.hive.hosts</name>
-        <value>sandbox.hortonworks.com</value>
+        <value>*</value>
     </property>
 
     <property>
@@ -132,15 +123,8 @@
     </property>
 
     <property>
-        <name>hadoop.security.key.provider.path</name>
-        <value></value>
-    </property>
-
-    <property>
         <name>io.compression.codecs</name>
-        <value>
-            org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec
-        </value>
+        <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/hbase-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 568de2e..46d5345 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -22,33 +22,8 @@
     </property>
 
     <property>
-        <name>hbase.bucketcache.ioengine</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.percentage.in.combinedcache</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.size</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bulkload.staging.dir</name>
-        <value>/apps/hbase/staging</value>
-    </property>
-
-    <property>
         <name>hbase.client.keyvalue.maxsize</name>
-        <value>1048576</value>
-    </property>
-
-    <property>
-        <name>hbase.client.retries.number</name>
-        <value>35</value>
+        <value>10485760</value>
     </property>
 
     <property>
@@ -63,19 +38,12 @@
 
     <property>
         <name>hbase.coprocessor.master.classes</name>
-        <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor</value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
         <name>hbase.coprocessor.region.classes</name>
-        <value>
-            org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
-        </value>
-    </property>
-
-    <property>
-        <name>hbase.coprocessor.regionserver.classes</name>
-        <value></value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
@@ -119,11 +87,6 @@
     </property>
 
     <property>
-        <name>hbase.hstore.compaction.max</name>
-        <value>10</value>
-    </property>
-
-    <property>
         <name>hbase.hstore.compactionThreshold</name>
         <value>3</value>
     </property>
@@ -140,42 +103,32 @@
 
     <property>
         <name>hbase.master.info.port</name>
-        <value>16010</value>
+        <value>60010</value>
     </property>
 
     <property>
         <name>hbase.master.port</name>
-        <value>16000</value>
+        <value>60000</value>
     </property>
 
     <property>
-        <name>hbase.region.server.rpc.scheduler.factory.class</name>
-        <value></value>
+        <name>hbase.regionserver.global.memstore.lowerLimit</name>
+        <value>0.38</value>
     </property>
 
     <property>
-        <name>hbase.regionserver.global.memstore.size</name>
+        <name>hbase.regionserver.global.memstore.upperLimit</name>
         <value>0.4</value>
     </property>
 
     <property>
         <name>hbase.regionserver.handler.count</name>
-        <value>30</value>
+        <value>60</value>
     </property>
 
     <property>
         <name>hbase.regionserver.info.port</name>
-        <value>16030</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.port</name>
-        <value>16020</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.wal.codec</name>
-        <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
+        <value>60030</value>
     </property>
 
     <property>
@@ -184,26 +137,11 @@
     </property>
 
     <property>
-        <name>hbase.rpc.controllerfactory.class</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.rpc.engine</name>
-        <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
-    </property>
-
-    <property>
         <name>hbase.rpc.protection</name>
         <value>PRIVACY</value>
     </property>
 
     <property>
-        <name>hbase.rpc.timeout</name>
-        <value>90000</value>
-    </property>
-
-    <property>
         <name>hbase.security.authentication</name>
         <value>simple</value>
     </property>
@@ -220,7 +158,7 @@
 
     <property>
         <name>hbase.tmp.dir</name>
-        <value>/tmp/hbase-${user.name}</value>
+        <value>/hadoop/hbase</value>
     </property>
 
     <property>
@@ -240,27 +178,34 @@
 
     <property>
         <name>hfile.block.cache.size</name>
-        <value>0.4</value>
-    </property>
-
-    <property>
-        <name>phoenix.functions.allowUserDefinedFunctions</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>phoenix.query.timeoutMs</name>
-        <value>60000</value>
+        <value>0.40</value>
     </property>
 
     <property>
         <name>zookeeper.session.timeout</name>
-        <value>60000</value>
+        <value>30000</value>
     </property>
 
     <property>
         <name>zookeeper.znode.parent</name>
         <value>/hbase-unsecure</value>
     </property>
-
-</configuration>
\ No newline at end of file
+    <property>
+        <name>hbase.client.pause</name>
+        <value>100</value>
+        <description>General client pause value.  Used mostly as value to wait
+            before running a retry of a failed get, region lookup, etc.
+            See hbase.client.retries.number for description of how we backoff from
+            this initial pause amount and how this pause works w/ retries.</description>
+    </property>
+    <property>
+        <name>hbase.client.retries.number</name>
+        <value>5</value>
+        <description>Maximum retries.  Used as maximum for all retryable
+            operations such as the getting of a cell's value, starting a row update,
+            etc.  Retry interval is a rough function based on hbase.client.pause.  At
+            first we retry at this interval but then with backoff, we pretty quickly reach
+            retrying every ten seconds.  See HConstants#RETRY_BACKOFF for how the backup
+            ramps up.  Change this setting and hbase.client.pause to suit your workload.</description>
+    </property>
+</configuration>

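As a rough sanity check of the client settings above: each retry waits the pause value times HBase's backoff multiplier (HConstants#RETRY_BACKOFF, which, assuming the 0.98 table, starts {1, 2, 3, 5, 10, ...}), so this configuration caps the worst-case wait at about two seconds:

    // Back-of-envelope check; multipliers assumed from HConstants.RETRY_BACKOFF.
    int pauseMs = 100;
    int[] backoff = { 1, 2, 3, 5, 10 };   // first 5 retries
    int totalMs = 0;
    for (int m : backoff) totalMs += pauseMs * m;
    System.out.println(totalMs + " ms");  // 2100 ms

versus the many minutes that the 35-retry setting dropped earlier in this file can accumulate.
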
http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hdfs-site.xml b/examples/test_case_data/sandbox/hdfs-site.xml
index 1d9040a..1175fff 100644
--- a/examples/test_case_data/sandbox/hdfs-site.xml
+++ b/examples/test_case_data/sandbox/hdfs-site.xml
@@ -18,7 +18,12 @@
 
     <property>
         <name>dfs.block.access.token.enable</name>
-        <value>true</value>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>dfs.block.size</name>
+        <value>34217472</value>
     </property>
 
     <property>
@@ -42,21 +47,11 @@
     </property>
 
     <property>
-        <name>dfs.client.retry.policy.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.cluster.administrators</name>
         <value>hdfs</value>
     </property>
 
     <property>
-        <name>dfs.content-summary.limit</name>
-        <value>5000</value>
-    </property>
-
-    <property>
         <name>dfs.datanode.address</name>
         <value>0.0.0.0:50010</value>
     </property>
@@ -69,6 +64,7 @@
     <property>
         <name>dfs.datanode.data.dir</name>
         <value>/hadoop/hdfs/data</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -84,6 +80,7 @@
     <property>
         <name>dfs.datanode.failed.volumes.tolerated</name>
         <value>0</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -107,18 +104,13 @@
     </property>
 
     <property>
-        <name>dfs.domain.socket.path</name>
-        <value>/var/lib/hadoop-hdfs/dn_socket</value>
-    </property>
-
-    <property>
-        <name>dfs.encrypt.data.transfer.cipher.suites</name>
-        <value>AES/CTR/NoPadding</value>
+        <name>dfs.datanode.max.xcievers</name>
+        <value>1024</value>
     </property>
 
     <property>
-        <name>dfs.encryption.key.provider.uri</name>
-        <value></value>
+        <name>dfs.domain.socket.path</name>
+        <value>/var/lib/hadoop-hdfs/dn_socket</value>
     </property>
 
     <property>
@@ -158,12 +150,7 @@
 
     <property>
         <name>dfs.namenode.accesstime.precision</name>
-        <value>0</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.audit.log.async</name>
-        <value>true</value>
+        <value>3600000</value>
     </property>
 
     <property>
@@ -197,11 +184,6 @@
     </property>
 
     <property>
-        <name>dfs.namenode.fslock.fair</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.handler.count</name>
         <value>100</value>
     </property>
@@ -209,6 +191,7 @@
     <property>
         <name>dfs.namenode.http-address</name>
         <value>sandbox.hortonworks.com:50070</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -217,13 +200,9 @@
     </property>
 
     <property>
-        <name>dfs.namenode.inode.attributes.provider.class</name>
-        <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.name.dir</name>
         <value>/hadoop/hdfs/namenode</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -232,13 +211,8 @@
     </property>
 
     <property>
-        <name>dfs.namenode.rpc-address</name>
-        <value>sandbox.hortonworks.com:8020</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.safemode.threshold-pct</name>
-        <value>0.999</value>
+        <value>1.0f</value>
     </property>
 
     <property>
@@ -262,6 +236,16 @@
     </property>
 
     <property>
+        <name>dfs.nfs.exports.allowed.hosts</name>
+        <value>* rw</value>
+    </property>
+
+    <property>
+        <name>dfs.nfs3.dump.dir</name>
+        <value>/tmp/.hdfs-nfs</value>
+    </property>
+
+    <property>
         <name>dfs.permissions.enabled</name>
         <value>true</value>
     </property>
@@ -273,7 +257,7 @@
 
     <property>
         <name>dfs.replication</name>
-        <value>3</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -284,11 +268,13 @@
     <property>
         <name>dfs.support.append</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
         <name>dfs.webhdfs.enabled</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -296,14 +282,4 @@
         <value>022</value>
     </property>
 
-    <property>
-        <name>nfs.exports.allowed.hosts</name>
-        <value>* rw</value>
-    </property>
-
-    <property>
-        <name>nfs.file.dump.dir</name>
-        <value>/tmp/.hdfs-nfs</value>
-    </property>
-
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index a8c210e..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -22,46 +22,11 @@
     </property>
 
     <property>
-        <name>atlas.cluster.name</name>
-        <value>Sandbox</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.maxThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.minThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.synchronous</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>atlas.rest.address</name>
-        <value>http://sandbox.hortonworks.com:21000</value>
-    </property>
-
-    <property>
-        <name>datanucleus.autoCreateSchema</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>datanucleus.cache.level2.type</name>
         <value>none</value>
     </property>
 
     <property>
-        <name>datanucleus.fixedDatastore</name>
-        <value>true</value>
-    </property>
-
-    <property>
         <name>hive.auto.convert.join</name>
         <value>true</value>
     </property>
@@ -73,7 +38,7 @@
 
     <property>
         <name>hive.auto.convert.join.noconditionaltask.size</name>
-        <value>357913941</value>
+        <value>1000000000</value>
     </property>
 
     <property>
@@ -162,16 +127,6 @@
     </property>
 
     <property>
-        <name>hive.default.fileformat</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
-        <name>hive.default.fileformat.managed</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
         <name>hive.enforce.bucketing</name>
         <value>true</value>
     </property>
@@ -207,6 +162,11 @@
     </property>
 
     <property>
+        <name>hive.exec.failure.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.max.created.files</name>
         <value>100000</value>
     </property>
@@ -237,11 +197,6 @@
     </property>
 
     <property>
-        <name>hive.exec.orc.encoding.strategy</name>
-        <value>SPEED</value>
-    </property>
-
-    <property>
         <name>hive.exec.parallel</name>
         <value>false</value>
     </property>
@@ -252,6 +207,16 @@
     </property>
 
     <property>
+        <name>hive.exec.post.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
+        <name>hive.exec.pre.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.reducers.bytes.per.reducer</name>
         <value>67108864</value>
     </property>
@@ -297,6 +262,11 @@
     </property>
 
     <property>
+        <name>hive.heapsize</name>
+        <value>250</value>
+    </property>
+
+    <property>
         <name>hive.limit.optimize.enable</name>
         <value>true</value>
     </property>
@@ -508,7 +478,7 @@
 
     <property>
         <name>hive.prewarm.numcontainers</name>
-        <value>3</value>
+        <value>10</value>
     </property>
 
     <property>
@@ -518,7 +488,7 @@
 
     <property>
         <name>hive.security.authorization.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -538,7 +508,7 @@
 
     <property>
         <name>hive.security.metastore.authorization.manager</name>
-        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
     </property>
 
     <property>
@@ -563,7 +533,12 @@
 
     <property>
         <name>hive.server2.enable.doAs</name>
-        <value>false</value>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>hive.server2.enable.impersonation</name>
+        <value>true</value>
     </property>
 
     <property>
@@ -573,7 +548,7 @@
 
     <property>
         <name>hive.server2.logging.operation.log.location</name>
-        <value>/tmp/hive/operation_logs</value>
+        <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
     </property>
 
     <property>
@@ -678,7 +653,7 @@
 
     <property>
         <name>hive.tez.container.size</name>
-        <value>1024</value>
+        <value>250</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index be470f9..e90f594 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>io.sort.mb</name>
-        <value>64</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -27,13 +27,13 @@
     </property>
 
     <property>
-        <name>mapred.job.map.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.map.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
-        <name>mapred.job.reduce.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.reduce.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
@@ -48,9 +48,7 @@
 
     <property>
         <name>mapreduce.admin.user.env</name>
-        <value>
-            LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64
-        </value>
+        <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
     </property>
 
     <property>
@@ -60,9 +58,7 @@
 
     <property>
         <name>mapreduce.application.classpath</name>
-        <value>
-            $PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
-        </value>
+        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
     </property>
 
     <property>
@@ -81,18 +77,14 @@
     </property>
 
     <property>
-        <name>mapreduce.job.counters.max</name>
-        <value>130</value>
-    </property>
-
-    <property>
         <name>mapreduce.job.emit-timeline-data</name>
         <value>false</value>
     </property>
 
+    <!-- The default value on HDP is 0.05; for test environments we need to be conservative with resources. -->
     <property>
         <name>mapreduce.job.reduce.slowstart.completedmaps</name>
-        <value>0.05</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -116,28 +108,13 @@
     </property>
 
     <property>
-        <name>mapreduce.jobhistory.recovery.enable</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.class</name>
-        <value>org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.leveldb.path</name>
-        <value>/hadoop/mapreduce/jhs</value>
-    </property>
-
-    <property>
         <name>mapreduce.jobhistory.webapp.address</name>
         <value>sandbox.hortonworks.com:19888</value>
     </property>
 
     <property>
         <name>mapreduce.map.java.opts</name>
-        <value>-Xmx1228m</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -147,7 +124,7 @@
 
     <property>
         <name>mapreduce.map.memory.mb</name>
-        <value>1536</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -182,7 +159,7 @@
 
     <property>
         <name>mapreduce.reduce.java.opts</name>
-        <value>-Xmx1638m</value>
+        <value>-Xmx200m</value>
     </property>
 
     <property>
@@ -192,7 +169,7 @@
 
     <property>
         <name>mapreduce.reduce.memory.mb</name>
-        <value>2048</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -242,7 +219,7 @@
 
     <property>
         <name>mapreduce.task.io.sort.mb</name>
-        <value>859</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -257,7 +234,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.command-opts</name>
-        <value>-Xmx819m -Dhdp.version=${hdp.version}</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -267,7 +244,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.resource.mb</name>
-        <value>1024</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -275,4 +252,4 @@
         <value>/user</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>
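
The memory knobs in this file come in pairs: each mapreduce.*.memory.mb value is the YARN container ceiling, and the matching *.java.opts -Xmx heap must fit inside it (here the reduce side pairs a 512 MB container with -Xmx200m). A minimal sketch of reading such a pair back for a sanity check, assuming the edited mapred-site.xml is on the classpath; the class name is illustrative:

    import org.apache.hadoop.conf.Configuration;

    public class ContainerHeapCheck {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            conf.addResource("mapred-site.xml"); // picked up from the classpath
            int reduceMb = conf.getInt("mapreduce.reduce.memory.mb", 1024);
            String reduceOpts = conf.get("mapreduce.reduce.java.opts", "");
            // With the values above: a 512 MB container wrapping a -Xmx200m heap
            System.out.println("reduce container " + reduceMb + " MB, opts: " + reduceOpts);
        }
    }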

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/examples/test_case_data/sandbox/yarn-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/yarn-site.xml b/examples/test_case_data/sandbox/yarn-site.xml
index ebdf44a..8256158 100644
--- a/examples/test_case_data/sandbox/yarn-site.xml
+++ b/examples/test_case_data/sandbox/yarn-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>hadoop.registry.rm.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -28,29 +28,22 @@
 
     <property>
         <name>yarn.acl.enable</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
         <name>yarn.admin.acl</name>
-        <value>*</value>
+        <value></value>
     </property>
 
     <property>
         <name>yarn.application.classpath</name>
-        <value>
-            $HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*
-        </value>
-    </property>
-
-    <property>
-        <name>yarn.authorization-provider</name>
-        <value>org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer</value>
+        <value>$HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
     </property>
 
     <property>
         <name>yarn.client.nodemanager-connect.max-wait-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -79,11 +72,6 @@
     </property>
 
     <property>
-        <name>yarn.node-labels.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.node-labels.fs-store.retry-policy-spec</name>
         <value>2000, 500</value>
     </property>
@@ -94,6 +82,11 @@
     </property>
 
     <property>
+        <name>yarn.node-labels.manager-class</name>
+        <value>org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager</value>
+    </property>
+
+    <property>
         <name>yarn.nodemanager.address</name>
         <value>0.0.0.0:45454</value>
     </property>
@@ -105,7 +98,7 @@
 
     <property>
         <name>yarn.nodemanager.aux-services</name>
-        <value>mapreduce_shuffle,spark_shuffle</value>
+        <value>mapreduce_shuffle</value>
     </property>
 
     <property>
@@ -114,11 +107,6 @@
     </property>
 
     <property>
-        <name>yarn.nodemanager.aux-services.spark_shuffle.class</name>
-        <value>org.apache.spark.network.yarn.YarnShuffleService</value>
-    </property>
-
-    <property>
         <name>yarn.nodemanager.bind-host</name>
         <value>0.0.0.0</value>
     </property>
@@ -160,7 +148,7 @@
 
     <property>
         <name>yarn.nodemanager.health-checker.script.timeout-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -255,12 +243,12 @@
 
     <property>
         <name>yarn.nodemanager.resource.memory-mb</name>
-        <value>7168</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.nodemanager.resource.percentage-physical-cpu-limit</name>
-        <value>80</value>
+        <value>100</value>
     </property>
 
     <property>
@@ -349,11 +337,6 @@
     </property>
 
     <property>
-        <name>yarn.resourcemanager.scheduler.monitor.enable</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.resourcemanager.state-store.max-completed-applications</name>
         <value>${yarn.resourcemanager.max-completed-applications}</value>
     </property>
@@ -385,7 +368,7 @@
 
     <property>
         <name>yarn.resourcemanager.webapp.https.address</name>
-        <value>sandbox.hortonworks.com:8090</value>
+        <value>localhost:8090</value>
     </property>
 
     <property>
@@ -425,7 +408,7 @@
 
     <property>
         <name>yarn.resourcemanager.zk-address</name>
-        <value>sandbox.hortonworks.com:2181</value>
+        <value>localhost:2181</value>
     </property>
 
     <property>
@@ -450,22 +433,12 @@
 
     <property>
         <name>yarn.scheduler.maximum-allocation-mb</name>
-        <value>7168</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.maximum-allocation-vcores</name>
-        <value>3</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.scheduler.minimum-allocation-mb</name>
-        <value>1024</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.minimum-allocation-vcores</name>
-        <value>1</value>
+        <value>1536</value>
     </property>
 
     <property>
@@ -494,41 +467,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.entity-group-fs-store.active-dir</name>
-        <value>/ats/active/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds</name>
-        <value>3600</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.done-dir</name>
-        <value>/ats/done/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes</name>
-        <value>org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.retain-seconds</name>
-        <value>604800</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.scan-interval-seconds</name>
-        <value>60</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.summary-store</name>
-        <value>org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.generic-application-history.store-class</name>
         <value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
     </property>
@@ -544,11 +482,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.leveldb-state-store.path</name>
-        <value>/hadoop/yarn/timeline</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.leveldb-timeline-store.path</name>
         <value>/hadoop/yarn/timeline</value>
     </property>
@@ -574,23 +507,8 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.plugin.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.recovery.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.state-store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore</value>
+        <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
     </property>
 
     <property>
@@ -604,11 +522,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.version</name>
-        <value>1.5</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.webapp.address</name>
         <value>sandbox.hortonworks.com:8188</value>
     </property>
@@ -618,4 +531,4 @@
         <value>sandbox.hortonworks.com:8190</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>
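
One implication of the scheduler values above: with the default resource calculator, the ResourceManager rounds every container request up to a multiple of yarn.scheduler.minimum-allocation-mb, so the 512 MB map and reduce tasks from mapred-site.xml each still occupy a full 1536 MB allocation. A small standalone sketch of that rounding (plain arithmetic, not a YARN API call):

    public class AllocationRounding {
        // Mirrors YARN's round-up-to-a-multiple-of-the-minimum normalization.
        static int normalize(int requestedMb, int minMb, int maxMb) {
            int rounded = ((requestedMb + minMb - 1) / minMb) * minMb;
            return Math.min(Math.max(rounded, minMb), maxMb);
        }

        public static void main(String[] args) {
            System.out.println(normalize(512, 1536, 9216));  // -> 1536
            System.out.println(normalize(2000, 1536, 9216)); // -> 3072
        }
    }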

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 726d72f..d43bc1e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -32,9 +32,11 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -58,7 +60,6 @@ import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.apache.kylin.tool.StorageCleanupJob;
@@ -95,10 +96,10 @@ public class BuildCubeWithEngine {
             logger.error("error", e);
             exitCode = 1;
         }
-
+        
         long millis = System.currentTimeMillis() - start;
         System.out.println("Time elapsed: " + (millis / 1000) + " sec - in " + BuildCubeWithEngine.class.getName());
-
+        
         System.exit(exitCode);
     }
 
@@ -358,10 +359,10 @@ public class BuildCubeWithEngine {
 
     @SuppressWarnings("unused")
     private void checkHFilesInHBase(CubeSegment segment) throws IOException {
-        try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
-            String tableName = segment.getStorageLocationIdentifier();
-
-            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+        String tableName = segment.getStorageLocationIdentifier();
+        try (HTable table = new HTable(conf, tableName)) {
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
             Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
             long totalSize = 0;
             for (Long size : sizeMap.values()) {
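
The hunk above shows the pattern this whole forced update repeats: on the HBase 0.98 client a table handle is constructed straight from a Configuration and closed by the caller, rather than leased from a shared Connection as on 1.x. A minimal sketch of the idiom, with an illustrative table name:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;

    public class HTableIdiom {
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create(); // hbase-site.xml from classpath
            try (HTable table = new HTable(conf, "EXAMPLE_TABLE")) { // illustrative name
                // gets, scans, region-size calculations, etc.
            }
        }
    }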

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a103ae0..007c40f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,15 +46,15 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 
         <!-- Hadoop versions -->
-        <hadoop2.version>2.7.1</hadoop2.version>
-        <yarn.version>2.7.1</yarn.version>
+        <hadoop2.version>2.6.0</hadoop2.version>
+        <yarn.version>2.6.0</yarn.version>
 
         <!-- Hive versions -->
-        <hive.version>1.2.1</hive.version>
-        <hive-hcatalog.version>1.2.1</hive-hcatalog.version>
+        <hive.version>0.14.0</hive.version>
+        <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
 
         <!-- HBase versions -->
-        <hbase-hadoop2.version>1.1.1</hbase-hadoop2.version>
+        <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
 
         <!-- Kafka versions -->
         <kafka.version>0.10.1.0</kafka.version>
@@ -72,7 +72,7 @@
 
         <!-- Hadoop Common deps, keep compatible with hadoop2.version -->
         <zookeeper.version>3.4.6</zookeeper.version>
-        <curator.version>2.7.1</curator.version>
+        <curator.version>2.6.0</curator.version>
         <jsr305.version>3.0.1</jsr305.version>
         <guava.version>14.0</guava.version>
         <jsch.version>0.1.53</jsch.version>
@@ -840,11 +840,6 @@
             <id>conjars</id>
             <url>http://conjars.org/repo/</url>
         </repository>
-
-        <repository>
-            <id>cloudera</id>
-            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-        </repository>
     </repositories>
 
     <build>
@@ -1198,106 +1193,6 @@
             </build>
         </profile>
         <profile>
-            <id>cdh5.7</id>
-            <properties>
-                <hadoop2.version>2.6.0-cdh5.7.0</hadoop2.version>
-                <yarn.version>2.6.0-cdh5.7.0</yarn.version>
-                <hive.version>1.1.0-cdh5.7.0</hive.version>
-                <hive-hcatalog.version>1.1.0-cdh5.7.0</hive-hcatalog.version>
-                <hbase-hadoop2.version>1.2.0-cdh5.7.0</hbase-hadoop2.version>
-                <zookeeper.version>3.4.5-cdh5.7.0</zookeeper.version>
-            </properties>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-compiler-plugin</artifactId>
-                        <configuration>
-                            <fork>true</fork>
-                            <meminitial>1024m</meminitial>
-                            <maxmem>2048m</maxmem>
-                        </configuration>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-dependency-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>copy-jamm</id>
-                                <goals>
-                                    <goal>copy</goal>
-                                </goals>
-                                <phase>generate-test-resources</phase>
-                                <configuration>
-                                    <artifactItems>
-                                        <artifactItem>
-                                            <groupId>com.github.jbellis</groupId>
-                                            <artifactId>jamm</artifactId>
-                                            <outputDirectory>${project.build.testOutputDirectory}</outputDirectory>
-                                            <destFileName>jamm.jar</destFileName>
-                                        </artifactItem>
-                                    </artifactItems>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.jacoco</groupId>
-                        <artifactId>jacoco-maven-plugin</artifactId>
-                        <configuration>
-                            <append>true</append>
-                            <destFile>
-                                ${sonar.jacoco.reportPath}
-                            </destFile>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>pre-test</id>
-                                <goals>
-                                    <goal>prepare-agent</goal>
-                                </goals>
-                                <configuration>
-                                    <propertyName>surefireArgLine</propertyName>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>post-test</id>
-                                <phase>test</phase>
-                                <goals>
-                                    <goal>report</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-surefire-plugin</artifactId>
-                        <version>2.19.1</version>
-                        <configuration>
-                            <reportsDirectory>${project.basedir}/../target/surefire-reports</reportsDirectory>
-                            <excludes>
-                                <exclude>**/IT*.java</exclude>
-                            </excludes>
-                            <systemProperties>
-                                <property>
-                                    <name>buildCubeUsingProvidedData</name>
-                                    <value>false</value>
-                                </property>
-                                <property>
-                                    <name>log4j.configuration</name>
-                                    <value>file:${project.basedir}/../build/conf/kylin-tools-log4j.properties</value>
-                                </property>
-                            </systemProperties>
-                            <argLine>-javaagent:${project.build.testOutputDirectory}/jamm.jar ${argLine} ${surefireArgLine}</argLine>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
             <!-- This profile adds/overrides few features of the 'apache-release'
                  profile in the parent pom. -->
             <id>apache-release</id>
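
With the cloudera repository and the cdh5.7 profile removed, a plain build of this branch resolves against the default properties above, i.e. Hadoop/YARN 2.6.0, Hive 0.14.0 and HBase 0.98.8-hadoop2.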

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 8095bf8..ea68855 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@ package org.apache.kylin.rest.security;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 
 /**
  */
@@ -36,6 +36,6 @@ public interface AclHBaseStorage {
 
     String prepareHBaseTable(Class<?> clazz) throws IOException;
 
-    Table getTable(String tableName) throws IOException;
+    HTableInterface getTable(String tableName) throws IOException;
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index cc76b87..d9326f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     private static final String aclTableName = "MOCK-ACL-TABLE";
     private static final String userTableName = "MOCK-USER-TABLE";
 
-    private Table mockedAclTable;
-    private Table mockedUserTable;
+    private HTableInterface mockedAclTable;
+    private HTableInterface mockedUserTable;
     private RealAclHBaseStorage realAcl;
 
     public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (realAcl != null) {
             return realAcl.getTable(tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 972eea9..d0aa0ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@ import com.google.protobuf.ServiceException;
  *     <li>remove some methods for loading data, checking values ...</li>
  * </ul>
  */
-public class MockHTable implements Table {
+public class MockHTable implements HTableInterface {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
@@ -114,6 +114,14 @@ public class MockHTable implements Table {
         this.columnFamilies.add(columnFamily);
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public byte[] getTableName() {
+        return tableName.getBytes();
+    }
+
     @Override
     public TableName getName() {
         return null;
@@ -192,8 +200,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean[] existsAll(List<Get> list) throws IOException {
-        return new boolean[0];
+    public Boolean[] exists(List<Get> gets) throws IOException {
+        return new Boolean[0];
     }
 
     /**
@@ -298,6 +306,15 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
+        // FIXME: implement
+        return null;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public ResultScanner getScanner(Scan scan) throws IOException {
         final List<Result> ret = new ArrayList<Result>();
         byte[] st = scan.getStartRow();
@@ -429,7 +446,7 @@ public class MockHTable implements Table {
              */
         }
         if (filter.hasFilterRow() && !filteredOnRowKey) {
-            filter.filterRow();
+            filter.filterRow(nkvs);
         }
         if (filter.filterRow() || filteredOnRowKey) {
             nkvs.clear();
@@ -518,11 +535,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -543,7 +555,7 @@ public class MockHTable implements Table {
                 continue;
             }
             for (KeyValue kv : delete.getFamilyMap().get(family)) {
-                if (kv.isDelete()) {
+                if (kv.isDeleteFamily()) {
                     data.get(row).get(kv.getFamily()).clear();
                 } else {
                     data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -580,11 +592,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -598,7 +605,7 @@ public class MockHTable implements Table {
      */
     @Override
     public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
-        return incrementColumnValue(row, family, qualifier, amount, null);
+        return incrementColumnValue(row, family, qualifier, amount, true);
     }
 
     @Override
@@ -610,6 +617,37 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
+        if (check(row, family, qualifier, null)) {
+            Put put = new Put(row);
+            put.add(family, qualifier, Bytes.toBytes(amount));
+            put(put);
+            return amount;
+        }
+        long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
+        data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
+        return newValue;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isAutoFlush() {
+        return true;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void flushCommits() throws IOException {
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public void close() throws IOException {
     }
 
@@ -635,6 +673,29 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public void setAutoFlush(boolean autoFlush) {
+        throw new NotImplementedException();
+
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
+        throw new NotImplementedException();
+
+    }
+
+    @Override
+    public void setAutoFlushTo(boolean autoFlush) {
+        throw new NotImplementedException();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public long getWriteBufferSize() {
         throw new NotImplementedException();
     }
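
Much of the churn in MockHTable comes from the shape of the HTableInterface contract. The batched existence check is a representative case, sketched below against any HTableInterface (the mock above included); row keys are illustrative:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ExistsContrast {
        static void check(HTableInterface htable) throws IOException {
            List<Get> gets = Arrays.asList(new Get(Bytes.toBytes("r1")), new Get(Bytes.toBytes("r2")));
            Boolean[] found = htable.exists(gets); // 0.98: Boolean[] exists(List<Get>)
            // the 1.x Table interface offers boolean[] existsAll(List<Get>) instead
            System.out.println(Arrays.toString(found));
        }
    }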

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index d1a1384..1d520c4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,8 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -59,11 +58,11 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (StringUtils.equals(tableName, aclTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
         } else if (StringUtils.equals(tableName, userTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(userTableName);
         } else {
             throw new IllegalStateException("getTable failed" + tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 3e3efec..d693a67 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public class AclService implements MutableAclService {
     @Override
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
         List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -173,7 +173,7 @@ public class AclService implements MutableAclService {
     @Override
     public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
         Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
-        Table htable = null;
+        HTableInterface htable = null;
         Result result = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,16 +226,17 @@ public class AclService implements MutableAclService {
         Authentication auth = SecurityContextHolder.getContext().getAuthentication();
         PrincipalSid sid = new PrincipalSid(auth);
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
 
             htable.put(put);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " created successfully.");
         } catch (IOException e) {
@@ -249,7 +250,7 @@ public class AclService implements MutableAclService {
 
     @Override
     public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -265,6 +266,7 @@ public class AclService implements MutableAclService {
             }
 
             htable.delete(delete);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " deleted successfully.");
         } catch (IOException e) {
@@ -282,7 +284,7 @@ public class AclService implements MutableAclService {
             throw e;
         }
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -293,16 +295,17 @@ public class AclService implements MutableAclService {
             Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
 
             if (null != acl.getParentAcl()) {
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
             }
 
             for (AccessControlEntry ace : acl.getEntries()) {
                 AceInfo aceInfo = new AceInfo(ace);
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
             }
 
             if (!put.isEmpty()) {
                 htable.put(put);
+                htable.flushCommits();
 
                 logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
             }
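
The service classes all migrate to the same 0.98 write idiom visible in this hunk: Put.add(family, qualifier, value) (renamed addColumn on the 1.x API) followed by an explicit flushCommits(), since HTableInterface may buffer mutations on the client. A condensed sketch with illustrative row, family and qualifier names:

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class WriteIdiom {
        static void write(HTableInterface htable) throws IOException {
            try {
                Put put = new Put(Bytes.toBytes("row-1"));
                put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
                htable.put(put);
                htable.flushCommits(); // push any client-side buffered mutations
            } finally {
                IOUtils.closeQuietly(htable);
            }
        }
    }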

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index c8c87cb..d28c87c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -27,7 +27,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.WeakHashMap;
 
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -393,24 +395,33 @@ public class CubeService extends BasicService {
         if (htableInfoCache.containsKey(tableName)) {
             return htableInfoCache.get(tableName);
         }
-        Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
+
+        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+        HTable table = null;
         HBaseResponse hr = null;
         long tableSize = 0;
         int regionCount = 0;
 
-        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
-        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        try {
+            table = new HTable(hconf, tableName);
 
-        for (long s : sizeMap.values()) {
-            tableSize += s;
-        }
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+            Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
 
-        regionCount = sizeMap.size();
+            for (long s : sizeMap.values()) {
+                tableSize += s;
+            }
+
+            regionCount = sizeMap.size();
+
+            // Set response.
+            hr = new HBaseResponse();
+            hr.setTableSize(tableSize);
+            hr.setRegionCount(regionCount);
+        } finally {
+            IOUtils.closeQuietly(table);
+        }
 
-        // Set response.
-        hr = new HBaseResponse();
-        hr.setTableSize(tableSize);
-        hr.setRegionCount(regionCount);
         htableInfoCache.put(tableName, hr);
 
         return hr;

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 4c02aa4..970cd56 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -49,11 +49,11 @@ import javax.sql.DataSource;
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.debug.BackdoorToggles;
@@ -164,13 +164,14 @@ public class QueryService extends BasicService {
         Query[] queryArray = new Query[queries.size()];
 
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -196,13 +197,14 @@ public class QueryService extends BasicService {
 
         Query[] queryArray = new Query[queries.size()];
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -214,12 +216,12 @@ public class QueryService extends BasicService {
         }
 
         List<Query> queries = new ArrayList<Query>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
+            HConnection conn = HBaseConnection.get(hbaseUrl);
             HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
 
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = conn.getTable(userTableName);
             Get get = new Get(Bytes.toBytes(creator));
             get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
             Result result = htable.get(get);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index ab54882..07c7c6f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -144,16 +144,16 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void updateUser(UserDetails user) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Pair<byte[], byte[]> pair = userToHBaseRow(user);
             Put put = new Put(pair.getKey());
-
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+            put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
 
             htable.put(put);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -163,13 +163,14 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void deleteUser(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Delete delete = new Delete(Bytes.toBytes(username));
 
             htable.delete(delete);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -184,7 +185,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public boolean userExists(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -215,7 +216,7 @@ public class UserService implements UserDetailsManager {
         s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
 
         List<UserDetails> all = new ArrayList<UserDetails>();
-        Table htable = null;
+        HTableInterface htable = null;
         ResultScanner scanner = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 73f31c5..2351412 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.kylin.common.KylinConfig;
@@ -63,7 +63,7 @@ public class HBaseConnection {
     private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
 
     private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
-    private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
+    private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
     private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
 
     private static ExecutorService coprocessorPool = null;
@@ -74,7 +74,7 @@ public class HBaseConnection {
             public void run() {
                 closeCoprocessorPool();
 
-                for (Connection conn : connPool.values()) {
+                for (HConnection conn : connPool.values()) {
                     try {
                         conn.close();
                     } catch (IOException e) {
@@ -143,7 +143,7 @@ public class HBaseConnection {
         // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
         if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
             throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-
+        
         Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
         addHBaseClusterNNHAConfiguration(conf);
 
@@ -207,9 +207,9 @@ public class HBaseConnection {
 
     // ============================================================================
 
-    // returned Connection can be shared by multiple threads and does not require close()
+    // returned HConnection can be shared by multiple threads and does not require close()
     @SuppressWarnings("resource")
-    public static Connection get(String url) {
+    public static HConnection get(String url) {
         // find configuration
         Configuration conf = configCache.get(url);
         if (conf == null) {
@@ -217,13 +217,13 @@ public class HBaseConnection {
             configCache.put(url, conf);
         }
 
-        Connection connection = connPool.get(url);
+        HConnection connection = connPool.get(url);
         try {
             while (true) {
                 // I don't use DCL since recreate a connection is not a big issue.
                 if (connection == null || connection.isClosed()) {
                     logger.info("connection is null or closed, creating a new one");
-                    connection = ConnectionFactory.createConnection(conf);
+                    connection = HConnectionManager.createConnection(conf);
                     connPool.put(url, connection);
                 }
 
@@ -242,8 +242,8 @@ public class HBaseConnection {
         return connection;
     }
 
-    public static boolean tableExists(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
         try {
             return hbase.tableExists(TableName.valueOf(tableName));
         } finally {
@@ -263,18 +263,18 @@ public class HBaseConnection {
         deleteTable(HBaseConnection.get(hbaseUrl), tableName);
     }
 
-    public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
-        Admin hbase = conn.getAdmin();
-        TableName tableName = TableName.valueOf(table);
+    public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+
         try {
             if (tableExists(conn, table)) {
                 logger.debug("HTable '" + table + "' already exists");
-                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
+                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
                 boolean wait = false;
                 for (String family : families) {
                     if (existingFamilies.contains(family) == false) {
                         logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
-                        hbase.addColumn(tableName, newFamilyDescriptor(family));
+                        hbase.addColumn(table, newFamilyDescriptor(family));
                         // addColumn() is async, is there a way to wait it finish?
                         wait = true;
                     }
@@ -327,8 +327,8 @@ public class HBaseConnection {
         return fd;
     }
 
-    public static void deleteTable(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static void deleteTable(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             if (!tableExists(conn, tableName)) {
@@ -338,10 +338,10 @@ public class HBaseConnection {
 
             logger.debug("delete HTable '" + tableName + "'");
 
-            if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
-                hbase.disableTable(TableName.valueOf(tableName));
+            if (hbase.isTableEnabled(tableName)) {
+                hbase.disableTable(tableName);
             }
-            hbase.deleteTable(TableName.valueOf(tableName));
+            hbase.deleteTable(tableName);
 
             logger.debug("HTable '" + tableName + "' deleted");
         } finally {
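
For reference, the 0.98 connection/admin pairing this file reverts to, as a minimal sketch; configuration comes from hbase-site.xml on the classpath and the table name is illustrative:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;

    public class ConnectionIdiom {
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HConnection conn = HConnectionManager.createConnection(conf); // shared, thread-safe
            HBaseAdmin admin = new HBaseAdmin(conn); // 0.98 builds the admin on the connection
            try {
                System.out.println(admin.tableExists("EXAMPLE_TABLE")); // illustrative name
            } finally {
                admin.close();
                conn.close();
            }
        }
    }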

http://git-wip-us.apache.org/repos/asf/kylin/blob/f5432f21/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 74ab017..a44de4f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -30,15 +30,14 @@ import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +68,7 @@ public class HBaseResourceStore extends ResourceStore {
     final String tableNameBase;
     final String hbaseUrl;
 
-    Connection getConnection() throws IOException {
+    HConnection getConnection() throws IOException {
         return HBaseConnection.get(hbaseUrl);
     }
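
HConnection here is the shared, heavyweight handle cached by HBaseConnection.get(); the HTableInterface handles fetched from it are cheap, per-call objects that must be closed. Every method below follows the same shape:

    HTableInterface table = getConnection().getTable(getAllInOneTableName());
    try {
        // ... read from or write to the metadata table ...
    } finally {
        IOUtils.closeQuietly(table); // release the per-call handle
    }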
 
@@ -121,7 +120,7 @@ public class HBaseResourceStore extends ResourceStore {
         byte[] endRow = Bytes.toBytes(lookForPrefix);
         endRow[endRow.length - 1]++;
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         Scan scan = new Scan(startRow, endRow);
         if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
             scan.addColumn(B_FAMILY, B_COLUMN_TS);
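
The endRow increment above turns an inclusive key prefix into an exclusive scan upper bound. A quick worked example (the "/cube/" prefix is illustrative, and the trick assumes the last byte is not 0xFF, where the increment would wrap):

    byte[] startRow = Bytes.toBytes("/cube/");
    byte[] endRow = Bytes.toBytes("/cube/");
    endRow[endRow.length - 1]++;            // '/' (0x2F) becomes '0' (0x30)
    Scan scan = new Scan(startRow, endRow); // scans the half-open range ["/cube/", "/cube0")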
@@ -238,12 +237,13 @@ public class HBaseResourceStore extends ResourceStore {
         IOUtils.copy(content, bout);
         bout.close();
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
 
             table.put(put);
+            table.flushCommits();
         } finally {
             IOUtils.closeQuietly(table);
         }
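
The added flushCommits() is the behavioral half of the revert: the 0.98 client can hold Puts in a client-side write buffer, so a metadata write could otherwise linger until close(). A minimal sketch of that buffering (the table name and the autoflush setting are assumptions for illustration):

    HTableInterface t = getConnection().getTable("kylin_metadata"); // name assumed
    t.setAutoFlush(false, true); // buffer Puts on the client
    t.put(put);                  // may sit in the write buffer...
    t.flushCommits();            // ...until explicitly pushed to the region server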
@@ -251,7 +251,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -264,6 +264,8 @@ public class HBaseResourceStore extends ResourceStore {
                 throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
+            table.flushCommits();
+
             return newTS;
         } finally {
             IOUtils.closeQuietly(table);
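
The elided middle of this hunk is Kylin's optimistic-concurrency step; in the 0.98 API the primitive is HTableInterface.checkAndPut. Roughly, as a sketch rather than the verbatim patch:

    // Atomic compare-and-set: apply the Put only if the stored timestamp cell
    // still equals the expected old value (a null expectation means "must not exist").
    boolean ok = table.checkAndPut(row, B_FAMILY, B_COLUMN_TS, bOldTS, put);
    if (!ok)
        throw new IllegalStateException("Overwriting conflict " + resPath);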
@@ -272,7 +274,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected void deleteResourceImpl(String resPath) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             boolean hdfsResourceExist = false;
             Result result = internalGetFromHTable(table, resPath, true, false);
@@ -285,6 +287,7 @@ public class HBaseResourceStore extends ResourceStore {
 
             Delete del = new Delete(Bytes.toBytes(resPath));
             table.delete(del);
+            table.flushCommits();
 
             if (hdfsResourceExist) { // remove hdfs cell value
                 Path redirectPath = bigCellHDFSPath(resPath);
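
A side note on the flushCommits() added here: in the 0.98 client only Puts travel through the client-side write buffer, while a Delete is sent to the region server synchronously, so the flush after table.delete(del) is defensive rather than strictly required.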
@@ -305,7 +308,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
         } finally {
@@ -314,7 +317,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     }
 
-    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+    private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -333,7 +336,7 @@ public class HBaseResourceStore extends ResourceStore {
         return exists ? result : null;
     }
 
-    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
+    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
         Path redirectPath = bigCellHDFSPath(resPath);
         FileSystem fileSystem = HadoopUtil.getWorkingFileSystem();
 
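The big-cell write itself is plain HDFS I/O. A minimal sketch of its core (error handling and the pre-existing-file case omitted; the overwrite flag is an assumption):

    FSDataOutputStream out = fileSystem.create(redirectPath, true); // overwrite if present
    try {
        out.write(largeColumn); // spill the oversized cell value to HDFS
    } finally {
        IOUtils.closeQuietly(out);
    }
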
@@ -358,7 +361,7 @@ public class HBaseResourceStore extends ResourceStore {
         return redirectPath;
     }
 
-    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
+    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
         int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
@@ -366,8 +369,8 @@ public class HBaseResourceStore extends ResourceStore {
         }
 
         Put put = new Put(row);
-        put.addColumn(B_FAMILY, B_COLUMN, content);
-        put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+        put.add(B_FAMILY, B_COLUMN, content);
+        put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
 
         return put;
     }
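
This last hunk is the purest example of the whole revert: Put.addColumn() only appeared in HBase 1.0, so the 0.98 branch must use the older Put.add() with the same (family, qualifier, value) arguments. Side by side:

    put.addColumn(B_FAMILY, B_COLUMN, content); // HBase 1.x spelling
    put.add(B_FAMILY, B_COLUMN, content);       // HBase 0.98 spelling of the same write

(Incidentally, the 10485760 fallback in kvSizeLimit above is just the stock 10 MB default of hbase.client.keyvalue.maxsize.)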