Posted to commits@kylin.apache.org by bi...@apache.org on 2017/02/23 07:28:15 UTC

[01/11] kylin git commit: minor improvement on ITFailfastQueryTest [Forced Update!]

Repository: kylin
Updated Branches:
  refs/heads/master-hbase0.98 da9b080f9 -> f4127f474 (forced update)


minor improvement on ITFailfastQueryTest


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4c512f00
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4c512f00
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4c512f00

Branch: refs/heads/master-hbase0.98
Commit: 4c512f00fc38efa6a211327c3cb32dee4b788007
Parents: 4bdb62c
Author: Hongbin Ma <ma...@apache.org>
Authored: Wed Feb 22 11:44:50 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Wed Feb 22 11:44:50 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/query/ITFailfastQueryTest.java     | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4c512f00/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
index a3720c8..73a597a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
@@ -67,7 +67,7 @@ public class ITFailfastQueryTest extends KylinTestBase {
     public void testPartitionExceedMaxScanBytes() throws Exception {
         String key = "kylin.storage.partition.max-scan-bytes";
         long saved = KylinConfig.getInstanceFromEnv().getPartitionMaxScanBytes();
-        KylinConfig.getInstanceFromEnv().setProperty(key, "18000");//very low threshold 
+        KylinConfig.getInstanceFromEnv().setProperty(key, "1000");//very low threshold 
 
         boolean meetExpectedException = false;
         try {
@@ -96,7 +96,7 @@ public class ITFailfastQueryTest extends KylinTestBase {
     public void testPartitionNotExceedMaxScanBytes() throws Exception {
         String key = "kylin.storage.partition.max-scan-bytes";
         long saved = KylinConfig.getInstanceFromEnv().getPartitionMaxScanBytes();
-        KylinConfig.getInstanceFromEnv().setProperty(key, "20000");//enough threshold 
+        KylinConfig.getInstanceFromEnv().setProperty(key, "100000");//enough threshold 
 
         try {
             String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";
@@ -111,7 +111,7 @@ public class ITFailfastQueryTest extends KylinTestBase {
     public void testQueryExceedMaxScanBytes() throws Exception {
         String key = "kylin.query.max-scan-bytes";
         long saved = KylinConfig.getInstanceFromEnv().getQueryMaxScanBytes();
-        KylinConfig.getInstanceFromEnv().setProperty(key, "30000");//very low threshold 
+        KylinConfig.getInstanceFromEnv().setProperty(key, "1000");//very low threshold 
 
         boolean meetExpectedException = false;
         try {
@@ -140,7 +140,7 @@ public class ITFailfastQueryTest extends KylinTestBase {
     public void testQueryNotExceedMaxScanBytes() throws Exception {
         String key = "kylin.query.max-scan-bytes";
         long saved = KylinConfig.getInstanceFromEnv().getQueryMaxScanBytes();
-        KylinConfig.getInstanceFromEnv().setProperty(key, "40000");//enough threshold 
+        KylinConfig.getInstanceFromEnv().setProperty(key, "100000");//enough threshold 
 
         try {
             String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";


[07/11] kylin git commit: KYLIN-2456 review

Posted by bi...@apache.org.
KYLIN-2456 review


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/730f5d7c
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/730f5d7c
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/730f5d7c

Branch: refs/heads/master-hbase0.98
Commit: 730f5d7ca296ad82e8bb89568dff3d010c9ad9c8
Parents: 4332b3d
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Feb 23 14:03:27 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Feb 23 14:04:12 2017 +0800

----------------------------------------------------------------------
 .../calcite/sql2rel/SqlToRelConverter.java      | 36 ++++++++++----------
 1 file changed, 18 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/730f5d7c/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
index a7be94f..02c45a0 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
@@ -595,14 +595,14 @@ public class SqlToRelConverter {
 
     /* OVERRIDE POINT */
     private RelRoot hackSelectStar(SqlNode query, RelRoot root) {
-        /*
-         * Rel tree is like:
-         *
-         *   LogicalSort (optional)
-         *    |- LogicalProject
-         *        |- LogicalFilter (optional)
-         *            |- OLAPTableScan or LogicalJoin
-         */
+//        /*
+//         * Rel tree is like:
+//         *
+//         *   LogicalSort (optional)
+//         *    |- LogicalProject
+//         *        |- LogicalFilter (optional)
+//         *            |- OLAPTableScan or LogicalJoin
+//         */
         LogicalProject rootPrj = null;
         LogicalSort rootSort = null;
         if (root.rel instanceof LogicalProject) {
@@ -613,16 +613,16 @@ public class SqlToRelConverter {
         } else {
             return root;
         }
-
+//
         RelNode input = rootPrj.getInput();
-        if (!(//
-                isAmong(input, "OLAPTableScan", "LogicalJoin")//
-                || (isAmong(input, "LogicalFilter") && isAmong(input.getInput(0), "OLAPTableScan", "LogicalJoin"))//
-             ))
-            return root;
-
-        if (rootPrj.getRowType().getFieldCount() < input.getRowType().getFieldCount())
-            return root;
+//        if (!(//
+//                isAmong(input, "OLAPTableScan", "LogicalJoin")//
+//                || (isAmong(input, "LogicalFilter") && isAmong(input.getInput(0), "OLAPTableScan", "LogicalJoin"))//
+//             ))
+//            return root;
+//
+//        if (rootPrj.getRowType().getFieldCount() < input.getRowType().getFieldCount())
+//            return root;
 
         RelDataType inType = rootPrj.getRowType();
         List<String> inFields = inType.getFieldNames();
@@ -5298,4 +5298,4 @@ public class SqlToRelConverter {
     }
 }
 
-// End SqlToRelConverter.java
\ No newline at end of file
+// End SqlToRelConverter.java
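
For context, the guard commented out above pattern-matches the input rel by class name through an isAmong(...) helper defined elsewhere in this class. A plausible reconstruction of that helper, offered only as a sketch (the real implementation is not part of this diff):

    // Hypothetical sketch: true if the node's class simple name is one of
    // the given names, e.g. isAmong(input, "OLAPTableScan", "LogicalJoin").
    private static boolean isAmong(RelNode rel, String... names) {
        String simpleName = rel.getClass().getSimpleName();
        for (String name : names) {
            if (simpleName.equals(name))
                return true;
        }
        return false;
    }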


[06/11] kylin git commit: KYLIN-1875 modify table alias

Posted by bi...@apache.org.
KYLIN-1875 modify table alias

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/37aab3c6
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/37aab3c6
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/37aab3c6

Branch: refs/heads/master-hbase0.98
Commit: 37aab3c69ef78633367718a5fd85caa398820f2f
Parents: 77df9dc
Author: chenzhx <34...@qq.com>
Authored: Wed Feb 22 17:32:14 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Thu Feb 23 11:32:29 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/modelDataModel.js | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/37aab3c6/webapp/app/js/controllers/modelDataModel.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/modelDataModel.js b/webapp/app/js/controllers/modelDataModel.js
index cb3305a..15fb20a 100644
--- a/webapp/app/js/controllers/modelDataModel.js
+++ b/webapp/app/js/controllers/modelDataModel.js
@@ -95,13 +95,9 @@ KylinApp.controller('ModelDataModelCtrl', function ($location,$scope, $modal,cub
     };
 
     $scope.$watch('newLookup.alias',function(newValue,oldValue){
-      if(!newValue){
-        return;
-      }else{
-        for(var i=0;i<$scope.newLookup.join.primary_key.length;i++){
-          $scope.newLookup.join.primary_key[i] = $scope.newLookup.join.primary_key[i].replace(oldValue+'.',newValue+'.');
+      for(var i=0;i<$scope.newLookup.join.primary_key.length;i++){
+          $scope.newLookup.join.primary_key[i] = $scope.newLookup.join.primary_key[i].replace(/^.*?\./,newValue+'.');
         }
-      }
     });
     $scope.editLookup = function (lookup) {
         $scope.lookupState.editingIndex = lookupList.indexOf(lookup);
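
The new pattern /^.*?\./ strips whatever alias currently prefixes each primary key, up to the first dot, instead of depending on oldValue (which the removed null-check had to guard against being empty or stale). The same replacement expressed in Java, with illustrative values:

    // The non-greedy ^.*?\. matches only up to the first '.', so only the
    // leading alias is swapped, never a later qualified segment.
    String pk = "OLD_ALIAS.SELLER_ID";                            // illustrative value
    String updated = pk.replaceFirst("^.*?\\.", "NEW_ALIAS" + ".");
    // updated == "NEW_ALIAS.SELLER_ID"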


[09/11] kylin git commit: minor, replace System.out in kylin-it with slf4j

Posted by bi...@apache.org.
minor, replace System.out in kylin-it with slf4j


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6cf9749e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6cf9749e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6cf9749e

Branch: refs/heads/master-hbase0.98
Commit: 6cf9749e23ef784eebc4a0604632eaf1c9efc301
Parents: 730f5d7
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Feb 23 14:04:07 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Feb 23 14:04:12 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/util/ClassUtil.java |  6 +-
 kylin-it/pom.xml                                | 45 ++++++++++---
 .../apache/kylin/query/ITCombinationTest.java   | 10 ++-
 .../apache/kylin/query/ITFailfastQueryTest.java |  8 ++-
 .../apache/kylin/query/ITKylinQueryTest.java    | 19 +++---
 .../apache/kylin/query/ITMassInQueryTest.java   | 18 +++--
 .../org/apache/kylin/query/KylinTestBase.java   | 70 ++++++++++----------
 7 files changed, 110 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java b/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
index 9af34c3..35a3277 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/ClassUtil.java
@@ -29,12 +29,16 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.WeakHashMap;
 
+import org.slf4j.LoggerFactory;
+
 /**
  */
 public class ClassUtil {
 
+    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ClassUtil.class);
+
     public static void addClasspath(String path) {
-        System.out.println("Adding path " + path + " to class path");
+        logger.info("Adding path " + path + " to class path");
         File file = new File(path);
 
         try {
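
The replacement keeps the original string concatenation. As a side note (not what this commit does), slf4j also accepts {} placeholders, which skip the formatting cost when the log level is disabled:

    logger.info("Adding path {} to class path", path); // equivalent, lazily formatted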

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/pom.xml
----------------------------------------------------------------------
diff --git a/kylin-it/pom.xml b/kylin-it/pom.xml
index d58a895..e039c3c 100644
--- a/kylin-it/pom.xml
+++ b/kylin-it/pom.xml
@@ -18,7 +18,8 @@
 -->
 
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
@@ -241,6 +242,18 @@
             <scope>provided</scope>
         </dependency>
 
+        <!-- log dependency -->
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <scope>test</scope>
+        </dependency>
+
         <!-- Spark dependency -->
         <dependency>
             <groupId>org.apache.spark</groupId>
@@ -294,10 +307,14 @@
                             <systemProperties>
                                 <property>
                                     <name>log4j.configuration</name>
-                                    <value>file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</value>
+                                    <value>
+                                        file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties
+                                    </value>
                                 </property>
                             </systemProperties>
-                            <argLine>-Xms1G -Xmx2G -XX:PermSize=128M -XX:MaxPermSize=512M -Dkylin.server.cluster-servers=localhost:7070</argLine>
+                            <argLine>-Xms1G -Xmx2G -XX:PermSize=128M -XX:MaxPermSize=512M
+                                -Dkylin.server.cluster-servers=localhost:7070
+                            </argLine>
                         </configuration>
                     </plugin>
                     <plugin>
@@ -317,12 +334,17 @@
                                     <arguments>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
-                                        <argument>-DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}</argument>
+                                        <argument>
+                                            -DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}
+                                        </argument>
                                         <argument>-DengineType=${engineType}</argument>
-                                        <argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
+                                        <argument>
+                                            -Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties
+                                        </argument>
                                         <argument>-classpath</argument>
                                         <classpath/>
-                                        <argument>org.apache.kylin.provision.BuildCubeWithEngine</argument>
+                                        <argument>org.apache.kylin.provision.BuildCubeWithEngine
+                                        </argument>
                                     </arguments>
                                     <workingDirectory>${project.basedir}</workingDirectory>
                                 </configuration>
@@ -340,11 +362,16 @@
                                     <arguments>
                                         <argument>-Dhdp.version=${hdp.version}</argument>
                                         <argument>-DfastBuildMode=${fastBuildMode}</argument>
-                                        <argument>-DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}</argument>
-                                        <argument>-Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties</argument>
+                                        <argument>
+                                            -DbuildCubeUsingProvidedData=${buildCubeUsingProvidedData}
+                                        </argument>
+                                        <argument>
+                                            -Dlog4j.configuration=file:${project.basedir}/..//build/conf/kylin-tools-log4j.properties
+                                        </argument>
                                         <argument>-classpath</argument>
                                         <classpath/>
-                                        <argument>org.apache.kylin.provision.BuildCubeWithStream</argument>
+                                        <argument>org.apache.kylin.provision.BuildCubeWithStream
+                                        </argument>
                                     </arguments>
                                     <workingDirectory>${project.basedir}</workingDirectory>
                                 </configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
index 23eadaf..0bed5ed 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITCombinationTest.java
@@ -30,6 +30,8 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Maps;
 
@@ -38,6 +40,8 @@ import com.google.common.collect.Maps;
 @RunWith(Parameterized.class)
 public class ITCombinationTest extends ITKylinQueryTest {
 
+    private static final Logger logger = LoggerFactory.getLogger(ITCombinationTest.class);
+
     @BeforeClass
     public static void setUp() throws SQLException {
         Map<RealizationType, Integer> priorities = Maps.newHashMap();
@@ -45,12 +49,12 @@ public class ITCombinationTest extends ITKylinQueryTest {
         priorities.put(RealizationType.CUBE, 0);
         Candidate.setPriorities(priorities);
 
-        printInfo("setUp in ITCombinationTest");
+        logger.info("setUp in ITCombinationTest");
     }
 
     @AfterClass
     public static void tearDown() {
-        printInfo("tearDown in ITCombinationTest");
+        logger.info("tearDown in ITCombinationTest");
         clean();
         Candidate.restorePriorities();
     }
@@ -70,7 +74,7 @@ public class ITCombinationTest extends ITKylinQueryTest {
 
     public ITCombinationTest(String joinType, String coprocessorToggle, String queryEngine) throws Exception {
 
-        printInfo("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle + ", query engine: " + queryEngine);
+        logger.info("Into combination join type: " + joinType + ", coprocessor toggle: " + coprocessorToggle + ", query engine: " + queryEngine);
 
         ITKylinQueryTest.clean();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
index 73a597a..17b804a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
@@ -31,17 +31,21 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Maps;
 
 public class ITFailfastQueryTest extends KylinTestBase {
 
+    private static final Logger logger = LoggerFactory.getLogger(ITFailfastQueryTest.class);
+
     @Rule
     public ExpectedException thrown = ExpectedException.none();
 
     @BeforeClass
     public static void setUp() throws Exception {
-        printInfo("setUp in ITFailfastQueryTest");
+        logger.info("setUp in ITFailfastQueryTest");
         Map<RealizationType, Integer> priorities = Maps.newHashMap();
         priorities.put(RealizationType.HYBRID, 0);
         priorities.put(RealizationType.CUBE, 0);
@@ -58,7 +62,7 @@ public class ITFailfastQueryTest extends KylinTestBase {
 
     @AfterClass
     public static void tearDown() throws Exception {
-        printInfo("tearDown in ITFailfastQueryTest");
+        logger.info("tearDown in ITFailfastQueryTest");
         Candidate.restorePriorities();
         clean();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 1158704..cdaa10b 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -44,18 +44,22 @@ import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Maps;
 
 @Ignore("KylinQueryTest is contained by ITCombinationTest")
 public class ITKylinQueryTest extends KylinTestBase {
 
+    private static final Logger logger = LoggerFactory.getLogger(ITKylinQueryTest.class);
+
     @Rule
     public ExpectedException thrown = ExpectedException.none();
 
     @BeforeClass
     public static void setUp() throws Exception {
-        printInfo("setUp in ITKylinQueryTest");
+        logger.info("setUp in ITKylinQueryTest");
         Map<RealizationType, Integer> priorities = Maps.newHashMap();
         priorities.put(RealizationType.HYBRID, 0);
         priorities.put(RealizationType.CUBE, 0);
@@ -69,7 +73,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @AfterClass
     public static void tearDown() throws Exception {
-        printInfo("tearDown in ITKylinQueryTest");
+        logger.info("tearDown in ITKylinQueryTest");
         Candidate.restorePriorities();
         clean();
     }
@@ -127,11 +131,10 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     }
 
-    @Ignore
     @Test
     public void testSingleRunQuery() throws Exception {
 
-        String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql_multi_model/query01.sql";
+        String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql_verifyCount/query03.sql";
 
         File sqlFile = new File(queryFileName);
         if (sqlFile.exists()) {
@@ -301,12 +304,12 @@ public class ITKylinQueryTest extends KylinTestBase {
     @Test
     public void testInvalidQuery() throws Exception {
 
-        printInfo("-------------------- Test Invalid Query --------------------");
+        logger.info("-------------------- Test Invalid Query --------------------");
         String queryFolder = getQueryFolderPrefix() + "src/test/resources/query/sql_invalid";
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
         for (File sqlFile : sqlFiles) {
             String queryName = StringUtils.split(sqlFile.getName(), '.')[0];
-            printInfo("Testing Query " + queryName);
+            logger.info("Testing Query " + queryName);
             String sql = getTextFromFile(sqlFile);
             IDatabaseConnection cubeConn = new DatabaseConnection(cubeConnection);
             try {
@@ -359,13 +362,13 @@ public class ITKylinQueryTest extends KylinTestBase {
     @Test
     public void testVersionQuery() throws Exception {
         String expectVersion = KylinVersion.getCurrentVersion().toString();
-        printInfo("---------- verify expect version: " + expectVersion);
+        logger.info("---------- verify expect version: " + expectVersion);
 
         String queryName = "QueryKylinVersion";
         String sql = "SELECT VERSION() AS version";
 
         // execute Kylin
-        printInfo("Query Result from Kylin - " + queryName);
+        logger.info("Query Result from Kylin - " + queryName);
         IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
         ITable kylinTable = executeQuery(kylinConn, queryName, sql, false);
         String queriedVersion = String.valueOf(kylinTable.getValue(0, "version"));

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
index 8859329..cca0be6 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITMassInQueryTest.java
@@ -39,6 +39,8 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Sets;
 
@@ -46,6 +48,8 @@ import com.google.common.collect.Sets;
  */
 public class ITMassInQueryTest extends KylinTestBase {
 
+    private static final Logger logger = LoggerFactory.getLogger(ITMassInQueryTest.class);
+
     FileSystem fileSystem;
     Set<Long> vipSellers;
 
@@ -100,7 +104,7 @@ public class ITMassInQueryTest extends KylinTestBase {
     }
 
     protected void run(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
-        printInfo("---------- test folder: " + queryFolder);
+        logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -112,7 +116,7 @@ public class ITMassInQueryTest extends KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, needSort);
             printResult(kylinTable);
@@ -121,7 +125,7 @@ public class ITMassInQueryTest extends KylinTestBase {
     }
 
     protected void compare(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
-        printInfo("---------- test folder: " + queryFolder);
+        logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -133,21 +137,21 @@ public class ITMassInQueryTest extends KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, needSort);
 
             // execute H2
             sql = sql.replace("massin(test_kylin_fact.SELLER_ID,'vip_customers')", "test_kylin_fact.SELLER_ID in ( " + org.apache.commons.lang.StringUtils.join(vipSellers, ",") + ")");
-            printInfo("Query Result from H2 - " + queryName);
-            printInfo("Query for H2 - " + sql);
+            logger.info("Query Result from H2 - " + queryName);
+            logger.info("Query for H2 - " + sql);
             ITable h2Table = executeQuery(newH2Connection(), queryName, sql, needSort);
 
             try {
                 // compare the result
                 assertTableEquals(h2Table, kylinTable);
             } catch (Throwable t) {
-                printInfo("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
+                logger.info("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
                 throw t;
             }
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/6cf9749e/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index fd04b2f..d0524c6 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -32,7 +32,6 @@ import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.sql.Timestamp;
 import java.sql.Types;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -65,6 +64,8 @@ import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
 import org.dbunit.ext.h2.H2Connection;
 import org.dbunit.ext.h2.H2DataTypeFactory;
 import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableSet;
 import com.google.common.io.Files;
@@ -73,6 +74,7 @@ import com.google.common.io.Files;
  */
 public class KylinTestBase {
 
+    private static final Logger logger = LoggerFactory.getLogger(KylinTestBase.class);
     public static boolean PRINT_RESULT = false;
 
     class ObjectArray {
@@ -181,10 +183,6 @@ public class KylinTestBase {
         return parameters;
     }
 
-    protected static void printInfo(String info) {
-        System.out.println(new Timestamp(System.currentTimeMillis()) + " - " + info);
-    }
-
     protected static void printResult(ITable resultTable) throws DataSetException {
         StringBuilder sb = new StringBuilder();
 
@@ -251,10 +249,10 @@ public class KylinTestBase {
         Statement statement = null;
         ResultSet resultSet = null;
         try {
-            printInfo("start running...");
+            logger.info("start running...");
             statement = cubeConnection.createStatement();
             resultSet = statement.executeQuery(sql);
-            printInfo("stop running...");
+            logger.info("stop running...");
 
             return output(resultSet, needDisplay);
         } finally {
@@ -319,13 +317,13 @@ public class KylinTestBase {
 
         String ret = StringUtils.join(tokens, " ");
         ret = ret.replaceAll(specialStr, System.getProperty("line.separator"));
-        System.out.println("The actual sql executed is: " + ret);
+        logger.info("The actual sql executed is: " + ret);
 
         return ret;
     }
 
     protected void execQueryUsingH2(String queryFolder, boolean needSort) throws Exception {
-        printInfo("---------- Running H2 queries: " + queryFolder);
+        logger.info("---------- Running H2 queries: " + queryFolder);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
         for (File sqlFile : sqlFiles) {
@@ -333,13 +331,13 @@ public class KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             // execute H2
-            printInfo("Query Result from H2 - " + queryName);
+            logger.info("Query Result from H2 - " + queryName);
             executeQuery(newH2Connection(), queryName, sql, needSort);
         }
     }
 
     protected void verifyResultRowColCount(String queryFolder) throws Exception {
-        printInfo("---------- verify result count in folder: " + queryFolder);
+        logger.info("---------- verify result count in folder: " + queryFolder);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
         for (File sqlFile : sqlFiles) {
@@ -352,7 +350,7 @@ public class KylinTestBase {
             int expectColCount = pair.getSecond();
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, false);
 
@@ -381,7 +379,7 @@ public class KylinTestBase {
     }
 
     protected void verifyResultContent(String queryFolder) throws Exception {
-        printInfo("---------- verify result content in folder: " + queryFolder);
+        logger.info("---------- verify result content in folder: " + queryFolder);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
         for (File sqlFile : sqlFiles) {
@@ -394,7 +392,7 @@ public class KylinTestBase {
             ITable expectTable = expect.getTable("expect");
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, false);
 
@@ -404,7 +402,7 @@ public class KylinTestBase {
     }
 
     protected void execAndCompResultSize(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
-        printInfo("---------- test folder: " + queryFolder);
+        logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -416,19 +414,19 @@ public class KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, needSort);
 
             // execute H2
-            printInfo("Query Result from H2 - " + queryName);
+            logger.info("Query Result from H2 - " + queryName);
             ITable h2Table = executeQuery(newH2Connection(), queryName, sql, needSort);
 
             try {
                 // compare the result
                 Assert.assertEquals(h2Table.getRowCount(), kylinTable.getRowCount());
             } catch (Throwable t) {
-                printInfo("execAndCompResultSize failed on: " + sqlFile.getAbsolutePath());
+                logger.info("execAndCompResultSize failed on: " + sqlFile.getAbsolutePath());
                 throw t;
             }
 
@@ -440,12 +438,12 @@ public class KylinTestBase {
     }
 
     protected void execAndCompColumnCount(String input, int expectedColumnCount) throws Exception {
-        printInfo("---------- test column count: " + input);
+        logger.info("---------- test column count: " + input);
         Set<String> sqlSet = ImmutableSet.of(input);
 
         for (String sql : sqlSet) {
             // execute Kylin
-            printInfo("Query Result from Kylin - " + sql);
+            logger.info("Query Result from Kylin - " + sql);
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, sql, sql, false);
 
@@ -453,14 +451,14 @@ public class KylinTestBase {
                 // compare the result
                 Assert.assertEquals(expectedColumnCount, kylinTable.getTableMetaData().getColumns().length);
             } catch (Throwable t) {
-                printInfo("execAndCompColumnCount failed on: " + sql);
+                logger.info("execAndCompColumnCount failed on: " + sql);
                 throw t;
             }
         }
     }
 
     protected void execLimitAndValidate(String queryFolder) throws Exception {
-        printInfo("---------- test folder: " + new File(queryFolder).getAbsolutePath());
+        logger.info("---------- test folder: " + new File(queryFolder).getAbsolutePath());
 
         int appendLimitQueries = 0;
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -477,18 +475,18 @@ public class KylinTestBase {
             }
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sqlWithLimit, false);
 
             // execute H2
-            printInfo("Query Result from H2 - " + queryName);
+            logger.info("Query Result from H2 - " + queryName);
             ITable h2Table = executeQuery(newH2Connection(), queryName, sql, false);
 
             try {
                 assertTableContains(h2Table, kylinTable);
             } catch (Throwable t) {
-                printInfo("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
+                logger.info("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
                 throw t;
             }
 
@@ -497,11 +495,11 @@ public class KylinTestBase {
                 zeroResultQueries.add(sql);
             }
         }
-        printInfo("Queries appended with limit: " + appendLimitQueries);
+        logger.info("Queries appended with limit: " + appendLimitQueries);
     }
 
     protected void execAndCompQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
-        printInfo("---------- test folder: " + new File(queryFolder).getAbsolutePath());
+        logger.info("---------- test folder: " + new File(queryFolder).getAbsolutePath());
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -513,21 +511,21 @@ public class KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, needSort);
 
             // execute H2
-            printInfo("Query Result from H2 - " + queryName);
+            logger.info("Query Result from H2 - " + queryName);
             long currentTime = System.currentTimeMillis();
             ITable h2Table = executeQuery(newH2Connection(), queryName, sql, needSort);
-            printInfo("H2 spent " + (System.currentTimeMillis() - currentTime) + " mili-seconds.");
+            logger.info("H2 spent " + (System.currentTimeMillis() - currentTime) + " mili-seconds.");
 
             try {
                 // compare the result
                 assertTableEquals(h2Table, kylinTable);
             } catch (Throwable t) {
-                printInfo("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
+                logger.info("execAndCompQuery failed on: " + sqlFile.getAbsolutePath());
                 throw t;
             }
 
@@ -539,7 +537,7 @@ public class KylinTestBase {
     }
 
     protected void execAndCompDynamicQuery(String queryFolder, String[] exclusiveQuerys, boolean needSort) throws Exception {
-        printInfo("---------- test folder: " + queryFolder);
+        logger.info("---------- test folder: " + queryFolder);
         Set<String> exclusiveSet = buildExclusiveSet(exclusiveQuerys);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -552,12 +550,12 @@ public class KylinTestBase {
             List<String> parameters = getParameterFromFile(sqlFile);
 
             // execute Kylin
-            printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
+            logger.info("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeDynamicQuery(kylinConn, queryName, sql, parameters, needSort);
 
             // execute H2
-            printInfo("Query Result from H2 - " + queryName);
+            logger.info("Query Result from H2 - " + queryName);
             ITable h2Table = executeDynamicQuery(newH2Connection(), queryName, sql, parameters, needSort);
 
             // compare the result
@@ -598,7 +596,7 @@ public class KylinTestBase {
         }
 
         String queryName = StringUtils.split(sqlFile.getName(), '.')[0];
-        printInfo("Testing Query " + queryName);
+        logger.info("Testing Query " + queryName);
         String sql = getTextFromFile(sqlFile);
         if (explain) {
             sql = "explain plan for " + sql;
@@ -651,7 +649,7 @@ public class KylinTestBase {
             }
             count++;
         }
-        printInfo(sb.toString());
+        logger.info(sb.toString());
         return count;
     }
 


[04/11] kylin git commit: KYLIN-2459 Make ResourceStore configurable

Posted by bi...@apache.org.
KYLIN-2459 Make ResourceStore configurable

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/82f68035
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/82f68035
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/82f68035

Branch: refs/heads/master-hbase0.98
Commit: 82f68035ceee222905e8daf443dd99585fbc4428
Parents: 6f35b62
Author: xiefan46 <95...@qq.com>
Authored: Wed Feb 22 18:07:49 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Wed Feb 22 20:49:04 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/kylin/common/KylinConfigBase.java   |  5 +++++
 .../apache/kylin/common/persistence/ResourceStore.java  | 12 ++++--------
 2 files changed, 9 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/82f68035/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 13d967d..1c26c63 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -957,4 +957,9 @@ abstract public class KylinConfigBase implements Serializable {
         return getOptional("kylin.storage.lock-manager-zk-port", "2181");
     }
 
+    //ResourceStore Impl
+    public String getResourceStoreImpl() {
+        return getOptional("kylin.metadata.default-resource-store-impl", "org.apache.kylin.storage.hbase.HBaseResourceStore");
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/82f68035/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index c441618..77143b0 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -70,19 +70,15 @@ abstract public class ResourceStore {
 
     private static final ArrayList<Class<? extends ResourceStore>> knownImpl = new ArrayList<Class<? extends ResourceStore>>();
 
-    private static ArrayList<Class<? extends ResourceStore>> getKnownImpl() {
+    private static ArrayList<Class<? extends ResourceStore>> getKnownImpl(KylinConfig kylinConfig) {
         if (knownImpl.isEmpty()) {
             knownImpl.add(FileResourceStore.class);
             try {
-                knownImpl.add(ClassUtil.forName("org.apache.kylin.storage.hbase.HBaseResourceStore", ResourceStore.class));
+                String implName = kylinConfig.getResourceStoreImpl();
+                knownImpl.add(ClassUtil.forName(implName, ResourceStore.class));
             } catch (Throwable e) {
                 logger.warn("Failed to load HBaseResourceStore impl class: " + e.toString());
             }
-            try {
-                knownImpl.add(ClassUtil.forName("org.apache.kylin.storage.hdfs.HDFSResourceStore", ResourceStore.class));
-            } catch (Throwable e) {
-                logger.warn("Failed to load HDFSResourceStore impl class: " + e.toString());
-            }
         }
         return knownImpl;
     }
@@ -90,7 +86,7 @@ abstract public class ResourceStore {
     private static ResourceStore createResourceStore(KylinConfig kylinConfig) {
         List<Throwable> es = new ArrayList<Throwable>();
         logger.info("Using metadata url " + kylinConfig.getMetadataUrl() + " for resource store");
-        for (Class<? extends ResourceStore> cls : getKnownImpl()) {
+        for (Class<? extends ResourceStore> cls : getKnownImpl(kylinConfig)) {
             try {
                 return cls.getConstructor(KylinConfig.class).newInstance(kylinConfig);
             } catch (Throwable e) {
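
With the hardcoded candidates gone, the implementation is selected by a single property. The default preserves the HBase behavior, and the HDFS store that the removed block used to try can now be opted into through configuration, e.g. in kylin.properties:

    # default, per KylinConfigBase.getResourceStoreImpl():
    kylin.metadata.default-resource-store-impl=org.apache.kylin.storage.hbase.HBaseResourceStore
    # or switch to the HDFS store the old code also attempted to load:
    # kylin.metadata.default-resource-store-impl=org.apache.kylin.storage.hdfs.HDFSResourceStore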


[02/11] kylin git commit: KYLIN-1875

Posted by bi...@apache.org.
KYLIN-1875

Signed-off-by: Billy Liu <bi...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/859605fb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/859605fb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/859605fb

Branch: refs/heads/master-hbase0.98
Commit: 859605fb35dcfb318a562691cb0ba7a0563db0f1
Parents: 4c512f0
Author: luguosheng <55...@qq.com>
Authored: Wed Feb 22 16:39:06 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Wed Feb 22 16:53:43 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/cubeEdit.js       | 49 ++++--------------------
 webapp/app/js/controllers/cubeSchema.js     |  6 +--
 webapp/app/js/controllers/modelDataModel.js |  9 +++++
 webapp/app/js/model/cubeDescModel.js        |  4 +-
 4 files changed, 22 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/859605fb/webapp/app/js/controllers/cubeEdit.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeEdit.js b/webapp/app/js/controllers/cubeEdit.js
index 92d8305..2b2d97f 100755
--- a/webapp/app/js/controllers/cubeEdit.js
+++ b/webapp/app/js/controllers/cubeEdit.js
@@ -40,47 +40,7 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     supportedEncoding:[],
     encodingMaps:{}
   }
-  TableModel.getColumnTypeEncodingMap().then(function(data){
-    $scope.store.encodingMaps=data;
-  });
-  CubeService.getValidEncodings({}, function (encodings) {
-    if(encodings){
-      for(var i in encodings)
-        if(VdmUtil.isNotExtraKey(encodings,i)){
-          var value = i
-          var name = value;
-          var typeVersion=+encodings[i]||1;
-          var suggest=false,selecttips='';
-          if(/\d+/.test(""+typeVersion)&&typeVersion>=1){
-            for(var s=1;s<=typeVersion;s++){
-              if(s==typeVersion){
-                suggest=true;
-              }
-              if(value=="int"){
-                name = "int (deprecated)";
-                suggest=false;
-              }
-              if(typeVersion>1){
-                selecttips="(v"+s;
-                if(s==typeVersion){
-                  selecttips=",suggest)"
-                }
-                selecttips=')';
-              }
-              $scope.store.supportedEncoding.push({
-                "name":name+selecttips,
-                "value":value+"[v"+s+"]",
-                "version":typeVersion,
-                "baseValue":value,
-                "suggest":suggest
-              });
-            }
-          }
-        }
-    }
-  },function(e){
-    $scope.store.supportedEncoding = $scope.cubeConfig.encodings;
-  })
+
 
   $scope.getColumnsByAlias = function (alias) {
     var temp = [];
@@ -742,6 +702,13 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
       return;
     }
     $scope.metaModel.model = modelsManager.getModel(newValue);
+    if($scope.metaModel.model){
+      $scope.modelsManager.initAliasMapByModelSchema($scope.metaModel);
+      cubesManager.init();
+      $scope.cubeMetaFrame=CubeDescModel.createNew({
+        model_name:newValue
+      })
+    }
     if(!$scope.metaModel.model){
       return;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/859605fb/webapp/app/js/controllers/cubeSchema.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeSchema.js b/webapp/app/js/controllers/cubeSchema.js
index 91a1567..a912c72 100755
--- a/webapp/app/js/controllers/cubeSchema.js
+++ b/webapp/app/js/controllers/cubeSchema.js
@@ -78,11 +78,11 @@ KylinApp.controller('CubeSchemaCtrl', function ($scope, QueryService, UserServic
                 suggest=false;
               }
               if(typeVersion>1){
-                selecttips="(v"+s;
+                selecttips=" (v"+s;
                 if(s==typeVersion){
-                  selecttips=",suggest)"
+                  selecttips+=",suggest"
                 }
-                selecttips=')';
+                selecttips+=')';
               }
               $scope.store.supportedEncoding.push({
                 "name":name+selecttips,

http://git-wip-us.apache.org/repos/asf/kylin/blob/859605fb/webapp/app/js/controllers/modelDataModel.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/modelDataModel.js b/webapp/app/js/controllers/modelDataModel.js
index 02c44ea..cb3305a 100644
--- a/webapp/app/js/controllers/modelDataModel.js
+++ b/webapp/app/js/controllers/modelDataModel.js
@@ -94,6 +94,15 @@ KylinApp.controller('ModelDataModelCtrl', function ($location,$scope, $modal,cub
         };
     };
 
+    $scope.$watch('newLookup.alias',function(newValue,oldValue){
+      if(!newValue){
+        return;
+      }else{
+        for(var i=0;i<$scope.newLookup.join.primary_key.length;i++){
+          $scope.newLookup.join.primary_key[i] = $scope.newLookup.join.primary_key[i].replace(oldValue+'.',newValue+'.');
+        }
+      }
+    });
     $scope.editLookup = function (lookup) {
         $scope.lookupState.editingIndex = lookupList.indexOf(lookup);
         $scope.lookupState.editing = true;

http://git-wip-us.apache.org/repos/asf/kylin/blob/859605fb/webapp/app/js/model/cubeDescModel.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/model/cubeDescModel.js b/webapp/app/js/model/cubeDescModel.js
index 95a0b13..d0f5250 100644
--- a/webapp/app/js/model/cubeDescModel.js
+++ b/webapp/app/js/model/cubeDescModel.js
@@ -21,7 +21,7 @@ KylinApp.service('CubeDescModel', function (kylinConfig) {
   this.cubeMetaFrame = {};
 
   //
-  this.createNew = function () {
+  this.createNew = function (defaultPara) {
     var cubeMeta = {
       "name": "",
       "model_name": "",
@@ -62,7 +62,7 @@ KylinApp.service('CubeDescModel', function (kylinConfig) {
       "override_kylin_properties":{}
     };
 
-    return cubeMeta;
+    return angular.extend(cubeMeta,defaultPara) ;
   };
 
   this.createMeasure = function () {


[10/11] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by bi...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index b12173d..51b21b3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,13 +26,12 @@ import java.util.NoSuchElementException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.BufferedMutator;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.kv.RowConstants;
@@ -87,13 +86,14 @@ public class SimpleHBaseStore implements IGTStore {
     }
 
     private class Writer implements IGTWriter {
-        final BufferedMutator table;
+        final HTableInterface table;
         final ByteBuffer rowkey = ByteBuffer.allocate(50);
         final ByteBuffer value = ByteBuffer.allocate(50);
 
         Writer() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            table = conn.getBufferedMutator(htableName);
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            table = conn.getTable(htableName);
+            table.setAutoFlush(false, true);
         }
 
         @Override
@@ -113,24 +113,24 @@ public class SimpleHBaseStore implements IGTStore {
 
             Put put = new Put(rowkey);
             put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
-            table.mutate(put);
+            table.put(put);
         }
 
         @Override
         public void close() throws IOException {
-            table.flush();
+            table.flushCommits();
             table.close();
         }
     }
 
     class Reader implements IGTScanner {
-        final Table table;
+        final HTableInterface table;
         final ResultScanner scanner;
 
         int count = 0;
 
         Reader() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
             table = conn.getTable(htableName);
 
             Scan scan = new Scan();
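
This branch swaps the HBase 1.x Connection/Table/BufferedMutator API back to the 0.98-era HConnection/HTableInterface. Condensed from the Writer changes above, the resulting buffered write pattern looks like this (a sketch of the pattern, not the full class):

    HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
    HTableInterface table = conn.getTable(htableName);
    table.setAutoFlush(false, true);   // buffer puts client-side (1.x used BufferedMutator)
    table.put(put);                    // queued, not yet sent
    table.flushCommits();              // push buffered puts to the region servers
    table.close();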

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 82b67b6..1cd6694 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -26,9 +26,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.zip.DataFormatException;
 
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -52,10 +51,10 @@ import org.apache.kylin.storage.gtrecord.StorageResponseGTScatter;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -117,7 +116,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
 
         // globally shared connection, does not require close
-        final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
 
         final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -172,7 +171,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     final AtomicReference<RuntimeException> regionErrorHolder = new AtomicReference<>();
 
                     try {
-                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
+                        HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
 
                         final CubeVisitRequest request = builder.build();
                         final byte[] startKey = epRange.getFirst();

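Besides the purely cosmetic reordering of the CubeVisitProtos imports, the substantive change
here is how the endpoint table handle is obtained: 0.98's HConnection.getTable takes the table
name as a plain String plus an ExecutorService for coprocessor calls, so the TableName.valueOf
wrapping drops out. The call shape, with an illustrative pool standing in for
HBaseConnection.getCoprocessorPool():

    ExecutorService pool = Executors.newFixedThreadPool(4); // illustrative
    HTableInterface table = conn.getTable("KYLIN_SEG_HTABLE", pool); // illustrative name
    // table.coprocessorService(CubeVisitService.class, startKey, endKey, callable)
    // then runs the CubeVisitRequest against each region, as in the surrounding code
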
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index 33f8d90..a24d517 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -25,12 +25,11 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.ShardingHash;
@@ -132,8 +131,8 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
         // primary key (also the 0th column block) is always selected
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
         // globally shared connection, does not require close
-        Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
-        final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
+        HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
 
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index cde127e..1e550f9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -195,7 +195,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         if (shardLength == 0) {
             return;
         }
-        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
+        byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
         Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
         Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
     }
@@ -234,7 +234,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             this.serviceStartTime = System.currentTimeMillis();
 
-            region = (HRegion)env.getRegion();
+            region = env.getRegion();
             region.startRegionOperation();
 
             // if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.

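Both hunks are type adjustments rather than behavior changes: on 0.98,
RegionCoprocessorEnvironment.getRegion() already returns a concrete HRegion (the cast is only
needed against the 1.x Region interface), and HRegion exposes the region start key directly.
In sketch form:

    HRegion region = env.getRegion();     // no cast needed on 0.98
    byte[] start = region.getStartKey();  // 1.x spells this region.getRegionInfo().getStartKey()
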
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index feb4842..2814ad6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -26,8 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -80,8 +79,7 @@ public class CubeHTableUtil {
         tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin admin = conn.getAdmin();
+        HBaseAdmin admin = new HBaseAdmin(conf);
 
         try {
             if (User.isHBaseSecurityEnabled(conf)) {
@@ -94,7 +92,7 @@ public class CubeHTableUtil {
                 tableDesc.addFamily(cf);
             }
 
-            if (admin.tableExists(TableName.valueOf(tableName))) {
+            if (admin.tableExists(tableName)) {
                 // admin.disableTable(tableName);
                 // admin.deleteTable(tableName);
                 throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -103,7 +101,7 @@ public class CubeHTableUtil {
             DeployCoprocessorCLI.deployCoprocessor(tableDesc);
 
             admin.createTable(tableDesc, splitKeys);
-            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);
@@ -112,7 +110,8 @@ public class CubeHTableUtil {
     }
 
     public static void deleteHTable(TableName tableName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
@@ -127,7 +126,8 @@ public class CubeHTableUtil {
 
     /** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
     public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);

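All three methods here drop the shared-Connection route and build an HBaseAdmin directly from
the current Configuration; on 0.98 the admin's table operations also accept plain String names.
The same String-based admin idiom recurs in DeprecatedGCStep, MergeGCStep, CleanHtableCLI,
HBaseClean, HBaseUsage and HtableAlterMetadataCLI below. A compact sketch of the lifecycle,
with an illustrative table name:

    Configuration conf = HBaseConfiguration.create(); // stands in for getCurrentHBaseConfiguration()
    HBaseAdmin admin = new HBaseAdmin(conf);          // 0.98 constructor; 1.x asks a Connection for an Admin
    try {
        if (admin.tableExists("demo_table")) {        // String overloads, no TableName.valueOf
            if (admin.isTableEnabled("demo_table"))
                admin.disableTable("demo_table");
            admin.deleteTable("demo_table");
        }
    } finally {
        admin.close(); // or IOUtils.closeQuietly(admin), as the patch does
    }
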
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index df3cf08..eacff9f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -25,13 +25,13 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.HiveCmdBuilder;
 import org.apache.kylin.job.exception.ExecuteException;
@@ -100,21 +100,19 @@ public class DeprecatedGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped HBase table " + table);
                             output.append("Dropped HBase table " + table + " \n");
                         } else {

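Beyond the admin swap, the loop now fetches each table descriptor by raw byte[] name; the
ownership check against the metadata URL prefix is unchanged. The shape of that check, with
illustrative names:

    HTableDescriptor desc = admin.getTableDescriptor(Bytes.toBytes("demo_table")); // byte[] lookup on 0.98
    String host = desc.getValue(IRealizationConstants.HTableTag);
    if (metadataUrlPrefix.equalsIgnoreCase(host)) {
        // this Kylin instance owns the table, so it is safe to disable and drop
    }
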
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index 6587d4e..d5b36df 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -23,8 +23,8 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
@@ -49,7 +49,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
 
     private final List<KeyValueCreator> keyValueCreators;
     private final int nColumns;
-    private final Table hTable;
+    private final HTableInterface hTable;
     private final CubeDesc cubeDesc;
     private final CubeSegment cubeSegment;
     private final Object[] measureValues;
@@ -58,7 +58,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
     private AbstractRowKeyEncoder rowKeyEncoder;
     private byte[] keybuf;
 
-    public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
+    public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
         this.keyValueCreators = Lists.newArrayList();
         this.cubeSegment = segment;
         this.cubeDesc = cubeSegment.getCubeDesc();
@@ -117,6 +117,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
             long t = System.currentTimeMillis();
             if (hTable != null) {
                 hTable.put(puts);
+                hTable.flushCommits();
             }
             logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
             puts.clear();

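The added flushCommits() matters because an HTableInterface may buffer puts client-side:
flushing after each committed batch pushes the cuboid rows to the region servers before
puts.clear() discards them locally. In isolation:

    hTable.put(puts);       // a List<Put>; may only reach the client-side write buffer
    hTable.flushCommits();  // force the batch out before the local list is cleared
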
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 2f7e164..5b2441c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,20 +69,19 @@ public class MergeGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped htable: " + table);
                             output.append("HBase table " + table + " is dropped. \n");
                         } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 56f867a..a150607 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,11 +21,9 @@ package org.apache.kylin.storage.hbase.util;
 import java.io.IOException;
 
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -40,8 +38,8 @@ public class CleanHtableCLI extends AbstractApplication {
     protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
 
     private void clean() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
             String name = descriptor.getNameAsString().toLowerCase();
@@ -52,7 +50,7 @@ public class CleanHtableCLI extends AbstractApplication {
                 System.out.println();
 
                 descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
-                hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
+                hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
             }
         }
         hbaseAdmin.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 581de38..68c0a39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -26,19 +26,19 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -89,7 +89,7 @@ public class CubeMigrationCLI {
     private static ResourceStore srcStore;
     private static ResourceStore dstStore;
     private static FileSystem hdfsFS;
-    private static Admin hbaseAdmin;
+    private static HBaseAdmin hbaseAdmin;
 
     public static final String ACL_INFO_FAMILY = "i";
     private static final String ACL_TABLE_NAME = "_acl";
@@ -134,8 +134,8 @@ public class CubeMigrationCLI {
 
         checkAndGetHbaseUrl();
 
-        Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
 
         hdfsFS = HadoopUtil.getWorkingFileSystem();
 
@@ -233,7 +233,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -327,8 +326,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -450,11 +449,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -474,6 +473,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -504,8 +504,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -539,12 +539,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -561,7 +562,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);

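The ACL copy and its rollback now obtain the ACL table by concatenated String name and flush
before the quiet close; ExtendCubeToHybridCLI below applies the same pattern to its _acl table.
On 0.98 close() also flushes, but IOUtils.closeQuietly would swallow any IOException raised
there, so the explicit flushCommits() is presumably what lets a failed ACL write actually fail
the step. The shape of it, with names abbreviated from the surrounding code:

    HTableInterface acl = HBaseConnection.get(dstConfig.getStorageUrl())
            .getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME); // 0.98 getTable(String)
    try {
        acl.put(put);        // or acl.delete(...), as on the rollback path
        acl.flushCommits();  // surface write errors here, not inside closeQuietly()
    } finally {
        IOUtils.closeQuietly(acl);
    }
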
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 20d0f7d..8bd4abf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeInstance;
@@ -61,7 +61,7 @@ public class CubeMigrationCheckCLI {
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
 
     private List<String> issueExistHTables;
     private List<String> inconsistentHTables;
@@ -130,8 +130,9 @@ public class CubeMigrationCheckCLI {
         this.dstCfg = kylinConfig;
         this.ifFix = isFix;
 
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
+
         issueExistHTables = Lists.newArrayList();
         inconsistentHTables = Lists.newArrayList();
     }
@@ -188,10 +189,10 @@ public class CubeMigrationCheckCLI {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
                 logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
-                hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.modifyTable(sepNameList[0], desc);
+                hbaseAdmin.enableTable(sepNameList[0]);
             }
         } else {
             logger.info("------ Inconsistent HTables Needed To Be Fixed ------");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 29c738e..c9e969f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -44,8 +44,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinVersion;
@@ -86,8 +85,7 @@ public class DeployCoprocessorCLI {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         FileSystem fileSystem = FileSystem.get(hconf);
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf);
 
         String localCoprocessorJar;
         if ("default".equals(args[0])) {
@@ -205,10 +203,10 @@ public class DeployCoprocessorCLI {
     public static void deployCoprocessor(HTableDescriptor tableDesc) {
         try {
             initHTableCoprocessor(tableDesc);
-            logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
+            logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
 
         } catch (Exception ex) {
-            logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
+            logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
             logger.error("Will try creating the table without coprocessor.");
         }
     }
@@ -229,7 +227,7 @@ public class DeployCoprocessorCLI {
         desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
     }
 
-    public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
+    public static boolean resetCoprocessor(String tableName, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
@@ -244,7 +242,7 @@ public class DeployCoprocessorCLI {
         logger.info("reset coprocessor on " + tableName);
 
         logger.info("Disable " + tableName);
-        hbaseAdmin.disableTable(TableName.valueOf(tableName));
+        hbaseAdmin.disableTable(tableName);
 
         while (desc.hasCoprocessor(CubeObserverClassOld2)) {
             desc.removeCoprocessor(CubeObserverClassOld2);
@@ -270,15 +268,16 @@ public class DeployCoprocessorCLI {
             desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
         }
 
-        hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+        hbaseAdmin.modifyTable(tableName, desc);
 
         logger.info("Enable " + tableName);
-        hbaseAdmin.enableTable(TableName.valueOf(tableName));
+        hbaseAdmin.enableTable(tableName);
 
         return true;
     }
 
-    private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
+
+    private static List<String> resetCoprocessorOnHTables(final HBaseAdmin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
         List<String> processedTables = Collections.synchronizedList(new ArrayList<String>());
         ExecutorService coprocessorPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
         CountDownLatch countDownLatch = new CountDownLatch(tableNames.size());
@@ -299,12 +298,12 @@ public class DeployCoprocessorCLI {
 
     private static class ResetCoprocessorWorker implements Runnable {
         private final CountDownLatch countDownLatch;
-        private final Admin hbaseAdmin;
+        private final HBaseAdmin hbaseAdmin;
         private final Path hdfsCoprocessorJar;
         private final String tableName;
         private final List<String> processedTables;
 
-        public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
+        public ResetCoprocessorWorker(CountDownLatch countDownLatch, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
             this.countDownLatch = countDownLatch;
             this.hbaseAdmin = hbaseAdmin;
             this.hdfsCoprocessorJar = hdfsCoprocessorJar;
@@ -425,7 +424,7 @@ public class DeployCoprocessorCLI {
         return coprocessorDir;
     }
 
-    private static Set<String> getCoprocessorJarPaths(Admin hbaseAdmin, List<String> tableNames) throws IOException {
+    private static Set<String> getCoprocessorJarPaths(HBaseAdmin hbaseAdmin, List<String> tableNames) throws IOException {
         HashSet<String> result = new HashSet<String>();
 
         for (String tableName : tableNames) {

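resetCoprocessor keeps the standard alter cycle, disable, modify, enable, now with String table
names throughout (hence also getNameAsString() in the log lines above, since getName() returns
a byte[] on 0.98 and would log as an array reference). The cycle in isolation:

    hbaseAdmin.disableTable(tableName);   // tableName is a plain String on 0.98
    desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
    hbaseAdmin.modifyTable(tableName, desc);
    hbaseAdmin.enableTable(tableName);
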
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 1cdb2f8..61c73d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -236,9 +235,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -258,6 +257,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index dd5f8fa..86ba22f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public class GridTableHBaseBenchmark {
         System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
         String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
 
-        Connection conn = HBaseConnection.get(hbaseUrl);
+        HConnection conn = HBaseConnection.get(hbaseUrl);
         createHTableIfNeeded(conn, TEST_TABLE);
         prepareData(conn);
 
@@ -91,10 +91,10 @@ public class GridTableHBaseBenchmark {
 
     }
 
-    private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+    private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
         Stats stats = new Stats("COLUMN_SCAN");
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -122,20 +122,20 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
         fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
     }
 
-    private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
     }
 
-    private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
     }
 
-    private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -156,11 +156,11 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+    private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
 
         final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
 
             stats.markStart();
@@ -204,8 +204,8 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void prepareData(Connection conn) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void prepareData(HConnection conn) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
 
         try {
             // check how many rows existing
@@ -258,8 +258,8 @@ public class GridTableHBaseBenchmark {
         return bytes;
     }
 
-    private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             boolean tableExist = false;

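createHTableIfNeeded uses the one constructor unique to this file: on 0.98 an HBaseAdmin can
wrap an existing HConnection, where 1.x would ask the Connection itself for an Admin. Side by
side:

    HConnection conn = HConnectionManager.createConnection(conf);
    HBaseAdmin hbase = new HBaseAdmin(conn);  // 0.98: admin over an existing connection
    // 1.x equivalent: Admin admin = connection.getAdmin();
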
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 940d64a..6749d6c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,11 +24,9 @@ import java.util.List;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -57,8 +55,8 @@ public class HBaseClean extends AbstractApplication {
     private void cleanUp() {
         try {
             // get all kylin hbase tables
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            Admin hbaseAdmin = conn.getAdmin();
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
             String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
             HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
             List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -73,12 +71,12 @@ public class HBaseClean extends AbstractApplication {
                 // drop tables
                 for (String htableName : allTablesNeedToBeDropped) {
                     logger.info("Deleting HBase table " + htableName);
-                    if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                        if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                            hbaseAdmin.disableTable(TableName.valueOf(htableName));
+                    if (hbaseAdmin.tableExists(htableName)) {
+                        if (hbaseAdmin.isTableEnabled(htableName)) {
+                            hbaseAdmin.disableTable(htableName);
                         }
 
-                        hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                        hbaseAdmin.deleteTable(htableName);
                         logger.info("Deleted HBase table " + htableName);
                     } else {
                         logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 1daca0a..937b65f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
@@ -32,15 +31,12 @@ import java.util.TreeSet;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
@@ -62,31 +58,30 @@ public class HBaseRegionSizeCalculator {
     /**
      * Computes size of each region for table and given column families.
      * */
-    public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
+    public HBaseRegionSizeCalculator(HTable table) throws IOException {
+        this(table, new HBaseAdmin(table.getConfiguration()));
+    }
 
-        Table table = null;
-        Admin admin = null;
-        try {
-            table = hbaseConnection.getTable(TableName.valueOf(tableName));
-            admin = hbaseConnection.getAdmin();
+    /** Constructor for unit testing */
+    HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
 
+        try {
             if (!enabled(table.getConfiguration())) {
                 logger.info("Region size calculation disabled.");
                 return;
             }
 
-            logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
+            logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
 
             // Get regions for table.
-            RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
-            List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
+            Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
             Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
 
-            for (HRegionLocation hRegionLocation : regionLocationList) {
-                tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
+            for (HRegionInfo regionInfo : tableRegionInfos) {
+                tableRegions.add(regionInfo.getRegionName());
             }
 
-            ClusterStatus clusterStatus = admin.getClusterStatus();
+            ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
             Collection<ServerName> servers = clusterStatus.getServers();
             final long megaByte = 1024L * 1024L;
 
@@ -110,7 +105,7 @@ public class HBaseRegionSizeCalculator {
                 }
             }
         } finally {
-            IOUtils.closeQuietly(admin);
+            IOUtils.closeQuietly(hBaseAdmin);
         }
 
     }

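Region enumeration moves from the 1.x RegionLocator to 0.98's HTable.getRegionLocations(),
which returns a NavigableMap keyed by HRegionInfo; the calculator only needs the key set. Note
the new constructors also take the HTable from the caller, and the finally block closes only
the admin. A sketch with an illustrative table name:

    HTable table = new HTable(conf, "demo_table");
    NavigableMap<HRegionInfo, ServerName> locations = table.getRegionLocations();
    for (HRegionInfo info : locations.keySet()) {
        byte[] regionName = info.getRegionName(); // 1.x: HRegionLocation.getRegionInfo().getRegionName()
    }
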
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index a2f60d4..266f7e7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,10 +23,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 
@@ -43,8 +42,8 @@ public class HBaseUsage {
         Map<String, List<String>> envs = Maps.newHashMap();
 
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         for (HTableDescriptor desc : tableDescriptors) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164..1db60fb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -32,15 +32,15 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -58,11 +58,11 @@ public class HbaseStreamingInput {
     private static final byte[] QN = "C".getBytes();
 
     public static void createTable(String tableName) throws IOException {
-        Connection conn = getConnection();
-        Admin hadmin = conn.getAdmin();
+        HConnection conn = getConnection();
+        HBaseAdmin hadmin = new HBaseAdmin(conn);
 
         try {
-            boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
+            boolean tableExist = hadmin.tableExists(tableName);
             if (tableExist) {
                 logger.info("HTable '" + tableName + "' already exists");
                 return;
@@ -120,8 +120,8 @@ public class HbaseStreamingInput {
                 e.printStackTrace();
             }
 
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             byte[] key = new byte[8 + 4];//time + id
 
@@ -136,7 +136,7 @@ public class HbaseStreamingInput {
                 Bytes.putInt(key, 8, i);
                 Put put = new Put(key);
                 byte[] cell = randomBytes(CELL_SIZE);
-                put.addColumn(CF, QN, cell);
+                put.add(CF, QN, cell);
                 buffer.add(put);
             }
             table.put(buffer);
@@ -172,8 +172,8 @@ public class HbaseStreamingInput {
             }
 
             Random r = new Random();
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             long leftBound = getFirstKeyTime(table);
             long rightBound = System.currentTimeMillis();
@@ -208,7 +208,7 @@ public class HbaseStreamingInput {
         }
     }
 
-    private static long getFirstKeyTime(Table table) throws IOException {
+    private static long getFirstKeyTime(HTableInterface table) throws IOException {
         long startTime = 0;
 
         Scan scan = new Scan();
@@ -226,8 +226,8 @@ public class HbaseStreamingInput {
 
     }
 
-    private static Connection getConnection() throws IOException {
-        return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+    private static HConnection getConnection() throws IOException {
+        return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
     }
 
     private static String formatTime(long time) {
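
The same commit also rebuilds the table-creation path around HConnection. A minimal sketch of that pattern, assuming a reachable HBase and an illustrative table/family name, looks like this:

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;

    public class CreateTableIfMissing {
        public static void main(String[] args) throws IOException {
            HConnection conn = HConnectionManager.createConnection(HBaseConfiguration.create());
            HBaseAdmin hadmin = new HBaseAdmin(conn); // 0.98 can build an admin on top of an HConnection
            try {
                if (!hadmin.tableExists("test_streaming")) { // plain String, no TableName.valueOf(...)
                    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("test_streaming"));
                    desc.addFamily(new HColumnDescriptor("F"));
                    hadmin.createTable(desc);
                }
            } finally {
                hadmin.close();
                conn.close();
            }
        }
    }

Connections from HConnectionManager.createConnection are heavyweight, so they should be created sparingly and closed when done.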

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ea05ab2..ca1a060 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,11 +23,10 @@ import java.io.IOException;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -51,8 +50,8 @@ public class HtableAlterMetadataCLI extends AbstractApplication {
     String metadataValue;
 
     private void alter() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
         hbaseAdmin.disableTable(table.getTableName());
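
Stripped of the CLI plumbing, the alter cycle above amounts to the sketch below; the table name and metadata key/value are placeholders:

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class AlterHTableMeta {
        public static void main(String[] args) throws IOException {
            HBaseAdmin hbaseAdmin = new HBaseAdmin(HBaseConfiguration.create());
            try {
                HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf("KYLIN_EXAMPLE"));
                table.setValue("OWNER", "somebody@example.com"); // placeholder metadata pair
                // descriptor changes require the disable / modify / enable cycle
                hbaseAdmin.disableTable(table.getTableName());
                hbaseAdmin.modifyTable(table.getTableName(), table);
                hbaseAdmin.enableTable(table.getTableName());
            } finally {
                hbaseAdmin.close();
            }
        }
    }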

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index df4e912..8ff5b0f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,14 +30,10 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,9 +52,9 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
     Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+
         // get all kylin hbase tables
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -77,13 +73,12 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
             // drop tables
             for (String htableName : allTablesNeedToBeDropped) {
                 logger.info("Deleting HBase table " + htableName);
-                TableName tableName = TableName.valueOf(htableName);
-                if (hbaseAdmin.tableExists(tableName)) {
-                    if (hbaseAdmin.isTableEnabled(tableName)) {
-                        hbaseAdmin.disableTable(tableName);
+                if (hbaseAdmin.tableExists(htableName)) {
+                    if (hbaseAdmin.isTableEnabled(htableName)) {
+                        hbaseAdmin.disableTable(htableName);
                     }
 
-                    hbaseAdmin.deleteTable(tableName);
+                    hbaseAdmin.deleteTable(htableName);
                     logger.info("Deleted HBase table " + htableName);
                 } else {
                     logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index bba6745..1ea8e8d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -22,13 +22,12 @@ import java.io.IOException;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -60,12 +59,12 @@ public class PingHBaseCLI {
         Scan scan = new Scan();
         int limit = 20;
 
-        Connection conn = null;
-        Table table = null;
+        HConnection conn = null;
+        HTableInterface table = null;
         ResultScanner scanner = null;
         try {
-            conn = ConnectionFactory.createConnection(hconf);
-            table = conn.getTable(TableName.valueOf(hbaseTable));
+            conn = HConnectionManager.createConnection(hconf);
+            table = conn.getTable(hbaseTable);
             scanner = table.getScanner(scan);
             int count = 0;
             for (Result r : scanner) {
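
Reduced to its essentials, the read path of this CLI on the 0.98 client is the sketch below; the table name and row limit are illustrative. All three handles are Closeable, which is why the original closes them via IOUtils:

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PingTable {
        public static void main(String[] args) throws IOException {
            HConnection conn = null;
            HTableInterface table = null;
            ResultScanner scanner = null;
            try {
                conn = HConnectionManager.createConnection(HBaseConfiguration.create());
                table = conn.getTable("kylin_metadata"); // illustrative table name
                scanner = table.getScanner(new Scan());
                int count = 0;
                for (Result r : scanner) {
                    System.out.println(Bytes.toString(r.getRow()));
                    if (++count >= 20) {
                        break;
                    }
                }
            } finally {
                IOUtils.closeQuietly(scanner);
                IOUtils.closeQuietly(table);
                IOUtils.closeQuietly(conn);
            }
        }
    }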

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index db516bb..01edb1f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,12 +22,11 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -71,8 +70,8 @@ public class RowCounterCLI {
 
         logger.info("My Scan " + scan.toString());
 
-        Connection conn = ConnectionFactory.createConnection(conf);
-        Table tableInterface = conn.getTable(TableName.valueOf(htableName));
+        HConnection conn = HConnectionManager.createConnection(conf);
+        HTableInterface tableInterface = conn.getTable(htableName);
 
         Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
         int counter = 0;
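
The counting loop itself is short. A self-contained version follows, with a key-only filter and scanner caching added on our side to keep value bytes off the wire; neither setting is in the commit:

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.KeyOnlyFilter;

    public class CountRows {
        public static void main(String[] args) throws IOException {
            HConnection conn = HConnectionManager.createConnection(HBaseConfiguration.create());
            HTableInterface tableInterface = conn.getTable("KYLIN_EXAMPLE"); // illustrative
            ResultScanner scanner = null;
            try {
                Scan scan = new Scan();
                scan.setFilter(new KeyOnlyFilter()); // return row keys only
                scan.setCaching(1024);               // fetch rows in batches
                scanner = tableInterface.getScanner(scan);
                Iterator<Result> iterator = scanner.iterator();
                long counter = 0;
                while (iterator.hasNext()) {
                    iterator.next();
                    counter++;
                }
                System.out.println(counter + " rows");
            } finally {
                if (scanner != null) {
                    scanner.close();
                }
                tableInterface.close();
                conn.close();
            }
        }
    }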

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index f6b65ab..23e7e10 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,9 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -59,7 +57,6 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,8 +77,7 @@ public class StorageCleanupJob extends AbstractApplication {
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -157,22 +153,22 @@ public class StorageCleanupJob extends AbstractApplication {
     }
 
     class DeleteHTableRunnable implements Callable {
-        Admin hbaseAdmin;
+        HBaseAdmin hbaseAdmin;
         String htableName;
 
-        DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
+        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
             this.hbaseAdmin = hbaseAdmin;
             this.htableName = htableName;
         }
 
         public Object call() throws Exception {
             logger.info("Deleting HBase table " + htableName);
-            if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                    hbaseAdmin.disableTable(TableName.valueOf(htableName));
+            if (hbaseAdmin.tableExists(htableName)) {
+                if (hbaseAdmin.isTableEnabled(htableName)) {
+                    hbaseAdmin.disableTable(htableName);
                 }
 
-                hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                hbaseAdmin.deleteTable(htableName);
                 logger.info("Deleted HBase table " + htableName);
             } else {
                 logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8..e36f662 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,15 +49,14 @@ public class UpdateHTableHostCLI {
     private List<String> errorMsgs = Lists.newArrayList();
 
     private List<String> htables;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
     private KylinConfig kylinConfig;
     private String oldHostValue;
 
     public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
         this.htables = htables;
         this.oldHostValue = oldHostValue;
-        Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
-        hbaseAdmin = conn.getAdmin();
+        this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
         this.kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
@@ -169,9 +166,9 @@ public class UpdateHTableHostCLI {
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
         if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
             desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
-            hbaseAdmin.disableTable(TableName.valueOf(tableName));
-            hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
-            hbaseAdmin.enableTable(TableName.valueOf(tableName));
+            hbaseAdmin.disableTable(tableName);
+            hbaseAdmin.modifyTable(tableName, desc);
+            hbaseAdmin.enableTable(tableName);
 
             updatedResources.add(tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/tool/pom.xml
----------------------------------------------------------------------
diff --git a/tool/pom.xml b/tool/pom.xml
index 278c2b8..072ee44 100644
--- a/tool/pom.xml
+++ b/tool/pom.xml
@@ -60,16 +60,6 @@
             <artifactId>hbase-client</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
 
         <!-- Env & Test -->
         <dependency>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index c0042f3..c8bff89 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -231,7 +231,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -448,11 +447,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -472,6 +471,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -537,12 +537,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -559,7 +560,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);
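
The flushCommits() calls added in this hunk are the behavioral bit worth noting: depending on its autoflush setting, a 0.98 HTableInterface may buffer puts client-side, and while close() would also flush, the tables here are closed via IOUtils.closeQuietly, which swallows any failure. The explicit flushCommits() surfaces write errors before the finally block runs. In sketch form, with the table, row and column names as placeholders:

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;

    public class FlushBeforeClose {
        public static void main(String[] args) throws IOException {
            HConnection conn = HConnectionManager.createConnection(HBaseConfiguration.create());
            HTableInterface aclHtable = conn.getTable("kylin_metadata_acl"); // placeholder name
            try {
                Put put = new Put("cube-uuid".getBytes());
                put.add("a".getBytes(), "q".getBytes(), "v".getBytes()); // 0.98 Put.add, not addColumn
                aclHtable.put(put);
                aclHtable.flushCommits(); // push any client-buffered mutations to the region server now
            } finally {
                aclHtable.close();
                conn.close();
            }
        }
    }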

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index f52fc3e..19e5db0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -232,9 +231,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -254,6 +253,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }


[05/11] kylin git commit: KYLIN-1875, remove model change limit

Posted by bi...@apache.org.
KYLIN-1875, remove model change limit

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/77df9dc1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/77df9dc1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/77df9dc1

Branch: refs/heads/master-hbase0.98
Commit: 77df9dc190c16a3b00c325b245c346722e65eaeb
Parents: 82f6803
Author: luguosheng <55...@qq.com>
Authored: Wed Feb 22 18:24:14 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Thu Feb 23 11:32:13 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/cubeEdit.js | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/77df9dc1/webapp/app/js/controllers/cubeEdit.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeEdit.js b/webapp/app/js/controllers/cubeEdit.js
index 31b84f4..0e5038e 100755
--- a/webapp/app/js/controllers/cubeEdit.js
+++ b/webapp/app/js/controllers/cubeEdit.js
@@ -704,9 +704,11 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     $scope.metaModel.model = modelsManager.getModel(newValue);
     if($scope.metaModel.model){
       $scope.modelsManager.initAliasMapByModelSchema($scope.metaModel);
-      $scope.cubeMetaFrame=CubeDescModel.createNew({
-        model_name:newValue
-      })
+      //if(oldValue){
+      //  $scope.cubeMetaFrame=CubeDescModel.createNew({
+      //    model_name:newValue
+      //  })
+      //}
     }
     if(!$scope.metaModel.model){
       return;


[03/11] kylin git commit: KYLIN-1875, remove useless js code

Posted by bi...@apache.org.
KYLIN-1875, remove useless js code

Signed-off-by: Billy Liu <bi...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6f35b62b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6f35b62b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6f35b62b

Branch: refs/heads/master-hbase0.98
Commit: 6f35b62bd81d793340b944804a4f78e5d8ff7dbe
Parents: 859605f
Author: luguosheng <55...@qq.com>
Authored: Wed Feb 22 17:01:07 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Wed Feb 22 17:02:55 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/controllers/cubeEdit.js | 1 -
 1 file changed, 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/6f35b62b/webapp/app/js/controllers/cubeEdit.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeEdit.js b/webapp/app/js/controllers/cubeEdit.js
index 2b2d97f..31b84f4 100755
--- a/webapp/app/js/controllers/cubeEdit.js
+++ b/webapp/app/js/controllers/cubeEdit.js
@@ -704,7 +704,6 @@ KylinApp.controller('CubeEditCtrl', function ($scope, $q, $routeParams, $locatio
     $scope.metaModel.model = modelsManager.getModel(newValue);
     if($scope.metaModel.model){
       $scope.modelsManager.initAliasMapByModelSchema($scope.metaModel);
-      cubesManager.init();
       $scope.cubeMetaFrame=CubeDescModel.createNew({
         model_name:newValue
       })


[08/11] kylin git commit: KYLIN-2331 remove unnecessary kylin-spark-conf.properties

Posted by bi...@apache.org.
KYLIN-2331 remove unnecessary kylin-spark-conf.properties


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4332b3d4
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4332b3d4
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4332b3d4

Branch: refs/heads/master-hbase0.98
Commit: 4332b3d42b631747707231ca47079c230bc628dc
Parents: 37aab3c
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Feb 23 10:35:06 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Feb 23 14:04:12 2017 +0800

----------------------------------------------------------------------
 .../sandbox/kylin-spark-conf.properties         | 28 --------------------
 1 file changed, 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4332b3d4/examples/test_case_data/sandbox/kylin-spark-conf.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin-spark-conf.properties b/examples/test_case_data/sandbox/kylin-spark-conf.properties
deleted file mode 100644
index b4a0c97..0000000
--- a/examples/test_case_data/sandbox/kylin-spark-conf.properties
+++ /dev/null
@@ -1,28 +0,0 @@
-spark.yarn.submit.file.replication=1
-spark.yarn.executor.memoryOverhead=200
-spark.yarn.driver.memoryOverhead=384
-#spark.master=local[4]
-#spark.submit.deployMode=client
-spark.master=yarn
-spark.submit.deployMode=cluster
-spark.eventLog.enabled=true
-spark.yarn.scheduler.heartbeat.interval-ms=5000
-spark.yarn.preserve.staging.files=true
-spark.yarn.queue=default
-spark.yarn.containerLauncherMaxThreads=25
-spark.yarn.max.executor.failures=3
-spark.eventLog.dir=hdfs\:///spark-history
-spark.history.kerberos.enabled=true
-spark.history.provider=org.apache.spark.deploy.history.FsHistoryProvider
-spark.history.ui.port=18080
-spark.history.fs.logDirectory=hdfs\:///spark-history
-spark.executor.memory=1G
-spark.storage.memoryFraction=0.3
-spark.executor.cores=1
-spark.executor.instances=1
-spark.history.kerberos.keytab=none
-spark.history.kerberos.principal=none
-spark.yarn.jar=hdfs://sandbox.hortonworks.com:8020/apps/spark/spark-assembly-1.6.3-hadoop2.6.0.jar
-spark.driver.extraJavaOptions=-Dhdp.version=current
-spark.yarn.am.extraJavaOptions=-Dhdp.version=current
-spark.executor.extraJavaOptions=-Dhdp.version=current


[11/11] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by bi...@apache.org.
KYLIN-2307 Create a branch for master with HBase 0.98 API


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f4127f47
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f4127f47
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f4127f47

Branch: refs/heads/master-hbase0.98
Commit: f4127f47483396adc2dbe6699448a70054255602
Parents: 6cf9749
Author: lidongsjtu <li...@apache.org>
Authored: Mon Jan 23 13:17:37 2017 +0800
Committer: Billy Liu <bi...@apache.org>
Committed: Thu Feb 23 15:26:07 2017 +0800

----------------------------------------------------------------------
 dev-support/test_all_against_hdp_2_2_4_2_2.sh   |  25 ++++
 dev-support/test_all_against_hdp_2_4_0_0_169.sh |  25 ----
 .../sandbox/capacity-scheduler.xml              |  17 ++-
 examples/test_case_data/sandbox/core-site.xml   |  28 +---
 examples/test_case_data/sandbox/hbase-site.xml  | 119 +++++------------
 examples/test_case_data/sandbox/hdfs-site.xml   |  84 +++++-------
 examples/test_case_data/sandbox/hive-site.xml   |  89 +++++--------
 examples/test_case_data/sandbox/mapred-site.xml |  57 +++------
 examples/test_case_data/sandbox/yarn-site.xml   | 127 +++----------------
 .../kylin/provision/BuildCubeWithEngine.java    |  17 +--
 pom.xml                                         | 117 +----------------
 .../kylin/rest/security/AclHBaseStorage.java    |   4 +-
 .../rest/security/MockAclHBaseStorage.java      |   8 +-
 .../apache/kylin/rest/security/MockHTable.java  |  95 +++++++++++---
 .../rest/security/RealAclHBaseStorage.java      |   9 +-
 .../apache/kylin/rest/service/AclService.java   |  25 ++--
 .../apache/kylin/rest/service/CubeService.java  |  35 +++--
 .../apache/kylin/rest/service/QueryService.java |  24 ++--
 .../apache/kylin/rest/service/UserService.java  |  17 +--
 .../kylin/storage/hbase/HBaseConnection.java    |  44 +++----
 .../kylin/storage/hbase/HBaseResourceStore.java |  31 +++--
 .../storage/hbase/cube/SimpleHBaseStore.java    |  20 +--
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |  13 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   9 +-
 .../coprocessor/endpoint/CubeVisitService.java  |   4 +-
 .../storage/hbase/steps/CubeHTableUtil.java     |  16 +--
 .../storage/hbase/steps/DeprecatedGCStep.java   |  24 ++--
 .../storage/hbase/steps/HBaseCuboidWriter.java  |   7 +-
 .../kylin/storage/hbase/steps/MergeGCStep.java  |  23 ++--
 .../storage/hbase/util/CleanHtableCLI.java      |  12 +-
 .../storage/hbase/util/CubeMigrationCLI.java    |  37 +++---
 .../hbase/util/CubeMigrationCheckCLI.java       |  17 +--
 .../hbase/util/DeployCoprocessorCLI.java        |  27 ++--
 .../hbase/util/ExtendCubeToHybridCLI.java       |   8 +-
 .../hbase/util/GridTableHBaseBenchmark.java     |  34 ++---
 .../kylin/storage/hbase/util/HBaseClean.java    |  18 ++-
 .../hbase/util/HBaseRegionSizeCalculator.java   |  35 +++--
 .../kylin/storage/hbase/util/HBaseUsage.java    |   9 +-
 .../storage/hbase/util/HbaseStreamingInput.java |  30 ++---
 .../hbase/util/HtableAlterMetadataCLI.java      |   9 +-
 .../storage/hbase/util/OrphanHBaseCleanJob.java |  19 +--
 .../kylin/storage/hbase/util/PingHBaseCLI.java  |  15 +--
 .../kylin/storage/hbase/util/RowCounterCLI.java |  11 +-
 .../storage/hbase/util/StorageCleanupJob.java   |  20 ++-
 .../storage/hbase/util/UpdateHTableHostCLI.java |  17 +--
 tool/pom.xml                                    |  10 --
 .../org/apache/kylin/tool/CubeMigrationCLI.java |  19 +--
 .../kylin/tool/ExtendCubeToHybridCLI.java       |   8 +-
 48 files changed, 596 insertions(+), 872 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/dev-support/test_all_against_hdp_2_2_4_2_2.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_2_4_2_2.sh b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
new file mode 100755
index 0000000..f7780dd
--- /dev/null
+++ b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+dir=$(dirname ${0})
+cd ${dir}
+cd ..
+
+mvn clean install -DskipTests 2>&1 | tee mci.log
+mvn verify -Dhdp.version=${HDP_VERSION:-"2.2.4.2-2"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/dev-support/test_all_against_hdp_2_4_0_0_169.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_4_0_0_169.sh b/dev-support/test_all_against_hdp_2_4_0_0_169.sh
deleted file mode 100755
index 2a3d24b..0000000
--- a/dev-support/test_all_against_hdp_2_4_0_0_169.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-dir=$(dirname ${0})
-cd ${dir}
-cd ..
-
-mvn clean install -DskipTests 2>&1 | tee mci.log
-mvn verify -Dhdp.version=${HDP_VERSION:-"2.4.0.0-169"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/capacity-scheduler.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/capacity-scheduler.xml b/examples/test_case_data/sandbox/capacity-scheduler.xml
index e042aa5..7cb985c 100644
--- a/examples/test_case_data/sandbox/capacity-scheduler.xml
+++ b/examples/test_case_data/sandbox/capacity-scheduler.xml
@@ -47,6 +47,16 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.acl_administer_queue</name>
         <value>*</value>
     </property>
@@ -57,6 +67,11 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.default-node-label-expression</name>
+        <value></value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.default.acl_administer_jobs</name>
         <value>*</value>
     </property>
@@ -96,4 +111,4 @@
         <value>default</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/core-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index a4ad5c6..0c5f62b 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -19,6 +19,7 @@
     <property>
         <name>fs.defaultFS</name>
         <value>hdfs://sandbox.hortonworks.com:8020</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -38,7 +39,7 @@
 
     <property>
         <name>hadoop.proxyuser.falcon.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -48,7 +49,7 @@
 
     <property>
         <name>hadoop.proxyuser.hbase.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -67,23 +68,13 @@
     </property>
 
     <property>
-        <name>hadoop.proxyuser.hdfs.groups</name>
-        <value>*</value>
-    </property>
-
-    <property>
-        <name>hadoop.proxyuser.hdfs.hosts</name>
-        <value>*</value>
-    </property>
-
-    <property>
         <name>hadoop.proxyuser.hive.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
         <name>hadoop.proxyuser.hive.hosts</name>
-        <value>sandbox.hortonworks.com</value>
+        <value>*</value>
     </property>
 
     <property>
@@ -132,15 +123,8 @@
     </property>
 
     <property>
-        <name>hadoop.security.key.provider.path</name>
-        <value></value>
-    </property>
-
-    <property>
         <name>io.compression.codecs</name>
-        <value>
-            org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec
-        </value>
+        <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/hbase-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 568de2e..46d5345 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -22,33 +22,8 @@
     </property>
 
     <property>
-        <name>hbase.bucketcache.ioengine</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.percentage.in.combinedcache</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.size</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bulkload.staging.dir</name>
-        <value>/apps/hbase/staging</value>
-    </property>
-
-    <property>
         <name>hbase.client.keyvalue.maxsize</name>
-        <value>1048576</value>
-    </property>
-
-    <property>
-        <name>hbase.client.retries.number</name>
-        <value>35</value>
+        <value>10485760</value>
     </property>
 
     <property>
@@ -63,19 +38,12 @@
 
     <property>
         <name>hbase.coprocessor.master.classes</name>
-        <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor</value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
         <name>hbase.coprocessor.region.classes</name>
-        <value>
-            org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
-        </value>
-    </property>
-
-    <property>
-        <name>hbase.coprocessor.regionserver.classes</name>
-        <value></value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
@@ -119,11 +87,6 @@
     </property>
 
     <property>
-        <name>hbase.hstore.compaction.max</name>
-        <value>10</value>
-    </property>
-
-    <property>
         <name>hbase.hstore.compactionThreshold</name>
         <value>3</value>
     </property>
@@ -140,42 +103,32 @@
 
     <property>
         <name>hbase.master.info.port</name>
-        <value>16010</value>
+        <value>60010</value>
     </property>
 
     <property>
         <name>hbase.master.port</name>
-        <value>16000</value>
+        <value>60000</value>
     </property>
 
     <property>
-        <name>hbase.region.server.rpc.scheduler.factory.class</name>
-        <value></value>
+        <name>hbase.regionserver.global.memstore.lowerLimit</name>
+        <value>0.38</value>
     </property>
 
     <property>
-        <name>hbase.regionserver.global.memstore.size</name>
+        <name>hbase.regionserver.global.memstore.upperLimit</name>
         <value>0.4</value>
     </property>
 
     <property>
         <name>hbase.regionserver.handler.count</name>
-        <value>30</value>
+        <value>60</value>
     </property>
 
     <property>
         <name>hbase.regionserver.info.port</name>
-        <value>16030</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.port</name>
-        <value>16020</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.wal.codec</name>
-        <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
+        <value>60030</value>
     </property>
 
     <property>
@@ -184,26 +137,11 @@
     </property>
 
     <property>
-        <name>hbase.rpc.controllerfactory.class</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.rpc.engine</name>
-        <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
-    </property>
-
-    <property>
         <name>hbase.rpc.protection</name>
         <value>PRIVACY</value>
     </property>
 
     <property>
-        <name>hbase.rpc.timeout</name>
-        <value>90000</value>
-    </property>
-
-    <property>
         <name>hbase.security.authentication</name>
         <value>simple</value>
     </property>
@@ -220,7 +158,7 @@
 
     <property>
         <name>hbase.tmp.dir</name>
-        <value>/tmp/hbase-${user.name}</value>
+        <value>/hadoop/hbase</value>
     </property>
 
     <property>
@@ -240,27 +178,34 @@
 
     <property>
         <name>hfile.block.cache.size</name>
-        <value>0.4</value>
-    </property>
-
-    <property>
-        <name>phoenix.functions.allowUserDefinedFunctions</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>phoenix.query.timeoutMs</name>
-        <value>60000</value>
+        <value>0.40</value>
     </property>
 
     <property>
         <name>zookeeper.session.timeout</name>
-        <value>60000</value>
+        <value>30000</value>
     </property>
 
     <property>
         <name>zookeeper.znode.parent</name>
         <value>/hbase-unsecure</value>
     </property>
-
-</configuration>
\ No newline at end of file
+    <property>
+        <name>hbase.client.pause</name>
+        <value>100</value>
+        <description>General client pause value.  Used mostly as value to wait
+            before running a retry of a failed get, region lookup, etc.
+            See hbase.client.retries.number for description of how we backoff from
+            this initial pause amount and how this pause works w/ retries.</description>
+    </property>
+    <property>
+        <name>hbase.client.retries.number</name>
+        <value>5</value>
+        <description>Maximum retries.  Used as maximum for all retryable
+            operations such as the getting of a cell's value, starting a row update,
+            etc.  Retry interval is a rough function based on hbase.client.pause.  At
+            first we retry at this interval but then with backoff, we pretty quickly reach
+            retrying every ten seconds.  See HConstants#RETRY_BACKOFF for how the backup
+            ramps up.  Change this setting and hbase.client.pause to suit your workload.</description>
+    </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hdfs-site.xml b/examples/test_case_data/sandbox/hdfs-site.xml
index 1d9040a..1175fff 100644
--- a/examples/test_case_data/sandbox/hdfs-site.xml
+++ b/examples/test_case_data/sandbox/hdfs-site.xml
@@ -18,7 +18,12 @@
 
     <property>
         <name>dfs.block.access.token.enable</name>
-        <value>true</value>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>dfs.block.size</name>
+        <value>34217472</value>
     </property>
 
     <property>
@@ -42,21 +47,11 @@
     </property>
 
     <property>
-        <name>dfs.client.retry.policy.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.cluster.administrators</name>
         <value>hdfs</value>
     </property>
 
     <property>
-        <name>dfs.content-summary.limit</name>
-        <value>5000</value>
-    </property>
-
-    <property>
         <name>dfs.datanode.address</name>
         <value>0.0.0.0:50010</value>
     </property>
@@ -69,6 +64,7 @@
     <property>
         <name>dfs.datanode.data.dir</name>
         <value>/hadoop/hdfs/data</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -84,6 +80,7 @@
     <property>
         <name>dfs.datanode.failed.volumes.tolerated</name>
         <value>0</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -107,18 +104,13 @@
     </property>
 
     <property>
-        <name>dfs.domain.socket.path</name>
-        <value>/var/lib/hadoop-hdfs/dn_socket</value>
-    </property>
-
-    <property>
-        <name>dfs.encrypt.data.transfer.cipher.suites</name>
-        <value>AES/CTR/NoPadding</value>
+        <name>dfs.datanode.max.xcievers</name>
+        <value>1024</value>
     </property>
 
     <property>
-        <name>dfs.encryption.key.provider.uri</name>
-        <value></value>
+        <name>dfs.domain.socket.path</name>
+        <value>/var/lib/hadoop-hdfs/dn_socket</value>
     </property>
 
     <property>
@@ -158,12 +150,7 @@
 
     <property>
         <name>dfs.namenode.accesstime.precision</name>
-        <value>0</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.audit.log.async</name>
-        <value>true</value>
+        <value>3600000</value>
     </property>
 
     <property>
@@ -197,11 +184,6 @@
     </property>
 
     <property>
-        <name>dfs.namenode.fslock.fair</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.handler.count</name>
         <value>100</value>
     </property>
@@ -209,6 +191,7 @@
     <property>
         <name>dfs.namenode.http-address</name>
         <value>sandbox.hortonworks.com:50070</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -217,13 +200,9 @@
     </property>
 
     <property>
-        <name>dfs.namenode.inode.attributes.provider.class</name>
-        <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.name.dir</name>
         <value>/hadoop/hdfs/namenode</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -232,13 +211,8 @@
     </property>
 
     <property>
-        <name>dfs.namenode.rpc-address</name>
-        <value>sandbox.hortonworks.com:8020</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.safemode.threshold-pct</name>
-        <value>0.999</value>
+        <value>1.0f</value>
     </property>
 
     <property>
@@ -262,6 +236,16 @@
     </property>
 
     <property>
+        <name>dfs.nfs.exports.allowed.hosts</name>
+        <value>* rw</value>
+    </property>
+
+    <property>
+        <name>dfs.nfs3.dump.dir</name>
+        <value>/tmp/.hdfs-nfs</value>
+    </property>
+
+    <property>
         <name>dfs.permissions.enabled</name>
         <value>true</value>
     </property>
@@ -273,7 +257,7 @@
 
     <property>
         <name>dfs.replication</name>
-        <value>3</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -284,11 +268,13 @@
     <property>
         <name>dfs.support.append</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
         <name>dfs.webhdfs.enabled</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -296,14 +282,4 @@
         <value>022</value>
     </property>
 
-    <property>
-        <name>nfs.exports.allowed.hosts</name>
-        <value>* rw</value>
-    </property>
-
-    <property>
-        <name>nfs.file.dump.dir</name>
-        <value>/tmp/.hdfs-nfs</value>
-    </property>
-
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index a8c210e..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -22,46 +22,11 @@
     </property>
 
     <property>
-        <name>atlas.cluster.name</name>
-        <value>Sandbox</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.maxThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.minThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.synchronous</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>atlas.rest.address</name>
-        <value>http://sandbox.hortonworks.com:21000</value>
-    </property>
-
-    <property>
-        <name>datanucleus.autoCreateSchema</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>datanucleus.cache.level2.type</name>
         <value>none</value>
     </property>
 
     <property>
-        <name>datanucleus.fixedDatastore</name>
-        <value>true</value>
-    </property>
-
-    <property>
         <name>hive.auto.convert.join</name>
         <value>true</value>
     </property>
@@ -73,7 +38,7 @@
 
     <property>
         <name>hive.auto.convert.join.noconditionaltask.size</name>
-        <value>357913941</value>
+        <value>1000000000</value>
     </property>
 
     <property>
@@ -162,16 +127,6 @@
     </property>
 
     <property>
-        <name>hive.default.fileformat</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
-        <name>hive.default.fileformat.managed</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
         <name>hive.enforce.bucketing</name>
         <value>true</value>
     </property>
@@ -207,6 +162,11 @@
     </property>
 
     <property>
+        <name>hive.exec.failure.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.max.created.files</name>
         <value>100000</value>
     </property>
@@ -237,11 +197,6 @@
     </property>
 
     <property>
-        <name>hive.exec.orc.encoding.strategy</name>
-        <value>SPEED</value>
-    </property>
-
-    <property>
         <name>hive.exec.parallel</name>
         <value>false</value>
     </property>
@@ -252,6 +207,16 @@
     </property>
 
     <property>
+        <name>hive.exec.post.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
+        <name>hive.exec.pre.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.reducers.bytes.per.reducer</name>
         <value>67108864</value>
     </property>
@@ -297,6 +262,11 @@
     </property>
 
     <property>
+        <name>hive.heapsize</name>
+        <value>250</value>
+    </property>
+
+    <property>
         <name>hive.limit.optimize.enable</name>
         <value>true</value>
     </property>
@@ -508,7 +478,7 @@
 
     <property>
         <name>hive.prewarm.numcontainers</name>
-        <value>3</value>
+        <value>10</value>
     </property>
 
     <property>
@@ -518,7 +488,7 @@
 
     <property>
         <name>hive.security.authorization.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -538,7 +508,7 @@
 
     <property>
         <name>hive.security.metastore.authorization.manager</name>
-        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
     </property>
 
     <property>
@@ -563,7 +533,12 @@
 
     <property>
         <name>hive.server2.enable.doAs</name>
-        <value>false</value>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>hive.server2.enable.impersonation</name>
+        <value>true</value>
     </property>
 
     <property>
@@ -573,7 +548,7 @@
 
     <property>
         <name>hive.server2.logging.operation.log.location</name>
-        <value>/tmp/hive/operation_logs</value>
+        <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
     </property>
 
     <property>
@@ -678,7 +653,7 @@
 
     <property>
         <name>hive.tez.container.size</name>
-        <value>1024</value>
+        <value>250</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index be470f9..e90f594 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>io.sort.mb</name>
-        <value>64</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -27,13 +27,13 @@
     </property>
 
     <property>
-        <name>mapred.job.map.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.map.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
-        <name>mapred.job.reduce.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.reduce.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
@@ -48,9 +48,7 @@
 
     <property>
         <name>mapreduce.admin.user.env</name>
-        <value>
-            LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64
-        </value>
+        <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
     </property>
 
     <property>
@@ -60,9 +58,7 @@
 
     <property>
         <name>mapreduce.application.classpath</name>
-        <value>
-            $PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
-        </value>
+        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
     </property>
 
     <property>
@@ -81,18 +77,14 @@
     </property>
 
     <property>
-        <name>mapreduce.job.counters.max</name>
-        <value>130</value>
-    </property>
-
-    <property>
         <name>mapreduce.job.emit-timeline-data</name>
         <value>false</value>
     </property>
 
+    <!-- The default value on HDP is 0.05; for test environments, however, we need to be conservative with resources. -->
     <property>
         <name>mapreduce.job.reduce.slowstart.completedmaps</name>
-        <value>0.05</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -116,28 +108,13 @@
     </property>
 
     <property>
-        <name>mapreduce.jobhistory.recovery.enable</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.class</name>
-        <value>org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.leveldb.path</name>
-        <value>/hadoop/mapreduce/jhs</value>
-    </property>
-
-    <property>
         <name>mapreduce.jobhistory.webapp.address</name>
         <value>sandbox.hortonworks.com:19888</value>
     </property>
 
     <property>
         <name>mapreduce.map.java.opts</name>
-        <value>-Xmx1228m</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -147,7 +124,7 @@
 
     <property>
         <name>mapreduce.map.memory.mb</name>
-        <value>1536</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -182,7 +159,7 @@
 
     <property>
         <name>mapreduce.reduce.java.opts</name>
-        <value>-Xmx1638m</value>
+        <value>-Xmx200m</value>
     </property>
 
     <property>
@@ -192,7 +169,7 @@
 
     <property>
         <name>mapreduce.reduce.memory.mb</name>
-        <value>2048</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -242,7 +219,7 @@
 
     <property>
         <name>mapreduce.task.io.sort.mb</name>
-        <value>859</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -257,7 +234,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.command-opts</name>
-        <value>-Xmx819m -Dhdp.version=${hdp.version}</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -267,7 +244,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.resource.mb</name>
-        <value>1024</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -275,4 +252,4 @@
         <value>/user</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

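A note on the mapred-site.xml figures: the -Xmx in mapreduce.*.java.opts must fit inside the container limit in mapreduce.*.memory.mb, leaving headroom for non-heap memory (code cache, thread stacks, direct buffers). With the usual rule of thumb of heap ≈ 0.8 × container, the 512 MB containers here suggest roughly 0.8 × 512 ≈ 410 MB of heap, so the reducer's -Xmx200m sits comfortably inside the limit while the mapper's -Xmx512m leaves no headroom and relies on the JVM staying lean. Likewise, mapreduce.job.reduce.slowstart.completedmaps = 1 schedules reducers only after 100% of the maps complete, trading map/shuffle overlap for a lower peak resource footprint on the sandbox.
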
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/examples/test_case_data/sandbox/yarn-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/yarn-site.xml b/examples/test_case_data/sandbox/yarn-site.xml
index ebdf44a..8256158 100644
--- a/examples/test_case_data/sandbox/yarn-site.xml
+++ b/examples/test_case_data/sandbox/yarn-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>hadoop.registry.rm.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -28,29 +28,22 @@
 
     <property>
         <name>yarn.acl.enable</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
         <name>yarn.admin.acl</name>
-        <value>*</value>
+        <value></value>
     </property>
 
     <property>
         <name>yarn.application.classpath</name>
-        <value>
-            $HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*
-        </value>
-    </property>
-
-    <property>
-        <name>yarn.authorization-provider</name>
-        <value>org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer</value>
+        <value>$HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
     </property>
 
     <property>
         <name>yarn.client.nodemanager-connect.max-wait-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -79,11 +72,6 @@
     </property>
 
     <property>
-        <name>yarn.node-labels.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.node-labels.fs-store.retry-policy-spec</name>
         <value>2000, 500</value>
     </property>
@@ -94,6 +82,11 @@
     </property>
 
     <property>
+        <name>yarn.node-labels.manager-class</name>
+        <value>org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager</value>
+    </property>
+
+    <property>
         <name>yarn.nodemanager.address</name>
         <value>0.0.0.0:45454</value>
     </property>
@@ -105,7 +98,7 @@
 
     <property>
         <name>yarn.nodemanager.aux-services</name>
-        <value>mapreduce_shuffle,spark_shuffle</value>
+        <value>mapreduce_shuffle</value>
     </property>
 
     <property>
@@ -114,11 +107,6 @@
     </property>
 
     <property>
-        <name>yarn.nodemanager.aux-services.spark_shuffle.class</name>
-        <value>org.apache.spark.network.yarn.YarnShuffleService</value>
-    </property>
-
-    <property>
         <name>yarn.nodemanager.bind-host</name>
         <value>0.0.0.0</value>
     </property>
@@ -160,7 +148,7 @@
 
     <property>
         <name>yarn.nodemanager.health-checker.script.timeout-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -255,12 +243,12 @@
 
     <property>
         <name>yarn.nodemanager.resource.memory-mb</name>
-        <value>7168</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.nodemanager.resource.percentage-physical-cpu-limit</name>
-        <value>80</value>
+        <value>100</value>
     </property>
 
     <property>
@@ -349,11 +337,6 @@
     </property>
 
     <property>
-        <name>yarn.resourcemanager.scheduler.monitor.enable</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.resourcemanager.state-store.max-completed-applications</name>
         <value>${yarn.resourcemanager.max-completed-applications}</value>
     </property>
@@ -385,7 +368,7 @@
 
     <property>
         <name>yarn.resourcemanager.webapp.https.address</name>
-        <value>sandbox.hortonworks.com:8090</value>
+        <value>localhost:8090</value>
     </property>
 
     <property>
@@ -425,7 +408,7 @@
 
     <property>
         <name>yarn.resourcemanager.zk-address</name>
-        <value>sandbox.hortonworks.com:2181</value>
+        <value>localhost:2181</value>
     </property>
 
     <property>
@@ -450,22 +433,12 @@
 
     <property>
         <name>yarn.scheduler.maximum-allocation-mb</name>
-        <value>7168</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.maximum-allocation-vcores</name>
-        <value>3</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.scheduler.minimum-allocation-mb</name>
-        <value>1024</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.minimum-allocation-vcores</name>
-        <value>1</value>
+        <value>1536</value>
     </property>
 
     <property>
@@ -494,41 +467,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.entity-group-fs-store.active-dir</name>
-        <value>/ats/active/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds</name>
-        <value>3600</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.done-dir</name>
-        <value>/ats/done/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes</name>
-        <value>org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.retain-seconds</name>
-        <value>604800</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.scan-interval-seconds</name>
-        <value>60</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.summary-store</name>
-        <value>org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.generic-application-history.store-class</name>
         <value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
     </property>
@@ -544,11 +482,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.leveldb-state-store.path</name>
-        <value>/hadoop/yarn/timeline</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.leveldb-timeline-store.path</name>
         <value>/hadoop/yarn/timeline</value>
     </property>
@@ -574,23 +507,8 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.plugin.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.recovery.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.state-store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore</value>
+        <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
     </property>
 
     <property>
@@ -604,11 +522,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.version</name>
-        <value>1.5</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.webapp.address</name>
         <value>sandbox.hortonworks.com:8188</value>
     </property>
@@ -618,4 +531,4 @@
         <value>sandbox.hortonworks.com:8190</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

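On the yarn-site.xml numbers: with yarn.nodemanager.resource.memory-mb = 9216 and yarn.scheduler.minimum-allocation-mb = 1536, a NodeManager can host at most floor(9216 / 1536) = 6 minimum-size containers, and because yarn.scheduler.maximum-allocation-mb is also 9216, a single large container may claim the whole node. The removed vcore bounds, Spark shuffle service, and ATS 1.5 entries likewise return the file to the defaults of the 0.98-era sandbox image.
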
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 726d72f..d43bc1e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -32,9 +32,11 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -58,7 +60,6 @@ import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.apache.kylin.tool.StorageCleanupJob;
@@ -95,10 +96,10 @@ public class BuildCubeWithEngine {
             logger.error("error", e);
             exitCode = 1;
         }
-
+        
         long millis = System.currentTimeMillis() - start;
         System.out.println("Time elapsed: " + (millis / 1000) + " sec - in " + BuildCubeWithEngine.class.getName());
-
+        
         System.exit(exitCode);
     }
 
@@ -358,10 +359,10 @@ public class BuildCubeWithEngine {
 
     @SuppressWarnings("unused")
     private void checkHFilesInHBase(CubeSegment segment) throws IOException {
-        try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
-            String tableName = segment.getStorageLocationIdentifier();
-
-            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+        String tableName = segment.getStorageLocationIdentifier();
+        try (HTable table = new HTable(conf, tableName)) {
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
             Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
             long totalSize = 0;
             for (Long size : sizeMap.values()) {

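The hunk above swaps the 1.x Connection-based HBaseRegionSizeCalculator constructor for the 0.98 HTable-based one. For orientation, a minimal standalone sketch of the same region inspection on the plain 0.98 client API (HBaseRegionSizeCalculator is Kylin's own utility, so HTable.getRegionLocations() stands in here, and the table name is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;

    public class RegionCountCheck {
        public static void main(String[] args) throws Exception {
            // Picks up hbase-site.xml from the classpath.
            Configuration conf = HBaseConfiguration.create();
            // try-with-resources works: the 0.98 table handle is Closeable.
            try (HTable table = new HTable(conf, "KYLIN_SEGMENT_TABLE")) {
                System.out.println("regions: " + table.getRegionLocations().size());
            }
        }
    }
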
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index bb7fde9..22e2eeb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,15 +46,15 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 
         <!-- Hadoop versions -->
-        <hadoop2.version>2.7.1</hadoop2.version>
-        <yarn.version>2.7.1</yarn.version>
+        <hadoop2.version>2.6.0</hadoop2.version>
+        <yarn.version>2.6.0</yarn.version>
 
         <!-- Hive versions -->
-        <hive.version>1.2.1</hive.version>
-        <hive-hcatalog.version>1.2.1</hive-hcatalog.version>
+        <hive.version>0.14.0</hive.version>
+        <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
 
         <!-- HBase versions -->
-        <hbase-hadoop2.version>1.1.1</hbase-hadoop2.version>
+        <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
 
         <!-- Kafka versions -->
         <kafka.version>0.10.1.0</kafka.version>
@@ -72,7 +72,7 @@
 
         <!-- Hadoop Common deps, keep compatible with hadoop2.version -->
         <zookeeper.version>3.4.6</zookeeper.version>
-        <curator.version>2.7.1</curator.version>
+        <curator.version>2.6.0</curator.version>
         <jsr305.version>3.0.1</jsr305.version>
         <guava.version>14.0</guava.version>
         <jsch.version>0.1.53</jsch.version>
@@ -844,11 +844,6 @@
             <id>conjars</id>
             <url>http://conjars.org/repo/</url>
         </repository>
-
-        <repository>
-            <id>cloudera</id>
-            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-        </repository>
     </repositories>
 
     <build>
@@ -1202,106 +1197,6 @@
             </build>
         </profile>
         <profile>
-            <id>cdh5.7</id>
-            <properties>
-                <hadoop2.version>2.6.0-cdh5.7.0</hadoop2.version>
-                <yarn.version>2.6.0-cdh5.7.0</yarn.version>
-                <hive.version>1.1.0-cdh5.7.0</hive.version>
-                <hive-hcatalog.version>1.1.0-cdh5.7.0</hive-hcatalog.version>
-                <hbase-hadoop2.version>1.2.0-cdh5.7.0</hbase-hadoop2.version>
-                <zookeeper.version>3.4.5-cdh5.7.0</zookeeper.version>
-            </properties>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-compiler-plugin</artifactId>
-                        <configuration>
-                            <fork>true</fork>
-                            <meminitial>1024m</meminitial>
-                            <maxmem>2048m</maxmem>
-                        </configuration>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-dependency-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>copy-jamm</id>
-                                <goals>
-                                    <goal>copy</goal>
-                                </goals>
-                                <phase>generate-test-resources</phase>
-                                <configuration>
-                                    <artifactItems>
-                                        <artifactItem>
-                                            <groupId>com.github.jbellis</groupId>
-                                            <artifactId>jamm</artifactId>
-                                            <outputDirectory>${project.build.testOutputDirectory}</outputDirectory>
-                                            <destFileName>jamm.jar</destFileName>
-                                        </artifactItem>
-                                    </artifactItems>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.jacoco</groupId>
-                        <artifactId>jacoco-maven-plugin</artifactId>
-                        <configuration>
-                            <append>true</append>
-                            <destFile>
-                                ${sonar.jacoco.reportPath}
-                            </destFile>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>pre-test</id>
-                                <goals>
-                                    <goal>prepare-agent</goal>
-                                </goals>
-                                <configuration>
-                                    <propertyName>surefireArgLine</propertyName>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>post-test</id>
-                                <phase>test</phase>
-                                <goals>
-                                    <goal>report</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-surefire-plugin</artifactId>
-                        <version>2.19.1</version>
-                        <configuration>
-                            <reportsDirectory>${project.basedir}/../target/surefire-reports</reportsDirectory>
-                            <excludes>
-                                <exclude>**/IT*.java</exclude>
-                            </excludes>
-                            <systemProperties>
-                                <property>
-                                    <name>buildCubeUsingProvidedData</name>
-                                    <value>false</value>
-                                </property>
-                                <property>
-                                    <name>log4j.configuration</name>
-                                    <value>file:${project.basedir}/../build/conf/kylin-tools-log4j.properties</value>
-                                </property>
-                            </systemProperties>
-                            <argLine>-javaagent:${project.build.testOutputDirectory}/jamm.jar ${argLine} ${surefireArgLine}</argLine>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
             <!-- This profile adds/overrides few features of the 'apache-release'
                  profile in the parent pom. -->
             <id>apache-release</id>

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 8095bf8..ea68855 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@ package org.apache.kylin.rest.security;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 
 /**
  */
@@ -36,6 +36,6 @@ public interface AclHBaseStorage {
 
     String prepareHBaseTable(Class<?> clazz) throws IOException;
 
-    Table getTable(String tableName) throws IOException;
+    HTableInterface getTable(String tableName) throws IOException;
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index cc76b87..d9326f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     private static final String aclTableName = "MOCK-ACL-TABLE";
     private static final String userTableName = "MOCK-USER-TABLE";
 
-    private Table mockedAclTable;
-    private Table mockedUserTable;
+    private HTableInterface mockedAclTable;
+    private HTableInterface mockedUserTable;
     private RealAclHBaseStorage realAcl;
 
     public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (realAcl != null) {
             return realAcl.getTable(tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 972eea9..d0aa0ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@ import com.google.protobuf.ServiceException;
  *     <li>remove some methods for loading data, checking values ...</li>
  * </ul>
  */
-public class MockHTable implements Table {
+public class MockHTable implements HTableInterface {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
@@ -114,6 +114,14 @@ public class MockHTable implements Table {
         this.columnFamilies.add(columnFamily);
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public byte[] getTableName() {
+        return tableName.getBytes();
+    }
+
     @Override
     public TableName getName() {
         return null;
@@ -192,8 +200,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean[] existsAll(List<Get> list) throws IOException {
-        return new boolean[0];
+    public Boolean[] exists(List<Get> gets) throws IOException {
+        return new Boolean[0];
     }
 
     /**
@@ -298,6 +306,15 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
+        // FIXME: implement
+        return null;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public ResultScanner getScanner(Scan scan) throws IOException {
         final List<Result> ret = new ArrayList<Result>();
         byte[] st = scan.getStartRow();
@@ -429,7 +446,7 @@ public class MockHTable implements Table {
              */
         }
         if (filter.hasFilterRow() && !filteredOnRowKey) {
-            filter.filterRow();
+            filter.filterRow(nkvs);
         }
         if (filter.filterRow() || filteredOnRowKey) {
             nkvs.clear();
@@ -518,11 +535,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -543,7 +555,7 @@ public class MockHTable implements Table {
                 continue;
             }
             for (KeyValue kv : delete.getFamilyMap().get(family)) {
-                if (kv.isDelete()) {
+                if (kv.isDeleteFamily()) {
                     data.get(row).get(kv.getFamily()).clear();
                 } else {
                     data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -580,11 +592,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -598,7 +605,7 @@ public class MockHTable implements Table {
      */
     @Override
     public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
-        return incrementColumnValue(row, family, qualifier, amount, null);
+        return incrementColumnValue(row, family, qualifier, amount, true);
     }
 
     @Override
@@ -610,6 +617,37 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
+        if (check(row, family, qualifier, null)) {
+            Put put = new Put(row);
+            put.add(family, qualifier, Bytes.toBytes(amount));
+            put(put);
+            return amount;
+        }
+        long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
+        data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
+        return newValue;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isAutoFlush() {
+        return true;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void flushCommits() throws IOException {
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public void close() throws IOException {
     }
 
@@ -635,6 +673,29 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public void setAutoFlush(boolean autoFlush) {
+        throw new NotImplementedException();
+
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
+        throw new NotImplementedException();
+
+    }
+
+    @Override
+    public void setAutoFlushTo(boolean autoFlush) {
+        throw new NotImplementedException();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public long getWriteBufferSize() {
         throw new NotImplementedException();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index d1a1384..1d520c4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,8 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -59,11 +58,11 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (StringUtils.equals(tableName, aclTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
         } else if (StringUtils.equals(tableName, userTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(userTableName);
         } else {
             throw new IllegalStateException("getTable failed: " + tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 3e3efec..d693a67 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public class AclService implements MutableAclService {
     @Override
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
         List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -173,7 +173,7 @@ public class AclService implements MutableAclService {
     @Override
     public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
         Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
-        Table htable = null;
+        HTableInterface htable = null;
         Result result = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,16 +226,17 @@ public class AclService implements MutableAclService {
         Authentication auth = SecurityContextHolder.getContext().getAuthentication();
         PrincipalSid sid = new PrincipalSid(auth);
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
 
             htable.put(put);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " created successfully.");
         } catch (IOException e) {
@@ -249,7 +250,7 @@ public class AclService implements MutableAclService {
 
     @Override
     public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -265,6 +266,7 @@ public class AclService implements MutableAclService {
             }
 
             htable.delete(delete);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " deleted successfully.");
         } catch (IOException e) {
@@ -282,7 +284,7 @@ public class AclService implements MutableAclService {
             throw e;
         }
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -293,16 +295,17 @@ public class AclService implements MutableAclService {
             Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
 
             if (null != acl.getParentAcl()) {
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
             }
 
             for (AccessControlEntry ace : acl.getEntries()) {
                 AceInfo aceInfo = new AceInfo(ace);
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
             }
 
             if (!put.isEmpty()) {
                 htable.put(put);
+                htable.flushCommits();
 
                 logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
             }

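The same mechanical rewrite recurs in QueryService and UserService below: Table becomes HTableInterface, Put.addColumn() becomes the older Put.add(), and an explicit htable.flushCommits() follows each mutation, since the pre-1.x client may buffer writes on the client side. A minimal sketch of the pattern, assuming an existing HConnection and a hypothetical table, family, and row:

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class WriteCellSketch {
        static void writeCell(HConnection conn) throws IOException {
            HTableInterface htable = null;
            try {
                htable = conn.getTable("example_table"); // hypothetical name
                Put put = new Put(Bytes.toBytes("row-1"));
                put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
                htable.put(put);
                htable.flushCommits(); // push any client-side buffered mutations
            } finally {
                IOUtils.closeQuietly(htable);
            }
        }
    }
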
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index c8c87cb..d28c87c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -27,7 +27,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.WeakHashMap;
 
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -393,24 +395,33 @@ public class CubeService extends BasicService {
         if (htableInfoCache.containsKey(tableName)) {
             return htableInfoCache.get(tableName);
         }
-        Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
+
+        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+        HTable table = null;
         HBaseResponse hr = null;
         long tableSize = 0;
         int regionCount = 0;
 
-        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
-        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        try {
+            table = new HTable(hconf, tableName);
 
-        for (long s : sizeMap.values()) {
-            tableSize += s;
-        }
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+            Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
 
-        regionCount = sizeMap.size();
+            for (long s : sizeMap.values()) {
+                tableSize += s;
+            }
+
+            regionCount = sizeMap.size();
+
+            // Set response.
+            hr = new HBaseResponse();
+            hr.setTableSize(tableSize);
+            hr.setRegionCount(regionCount);
+        } finally {
+            IOUtils.closeQuietly(table);
+        }
 
-        // Set response.
-        hr = new HBaseResponse();
-        hr.setTableSize(tableSize);
-        hr.setRegionCount(regionCount);
         htableInfoCache.put(tableName, hr);
 
         return hr;

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 122b823..60fc751 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -49,11 +49,11 @@ import javax.sql.DataSource;
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.debug.BackdoorToggles;
@@ -164,13 +164,14 @@ public class QueryService extends BasicService {
         Query[] queryArray = new Query[queries.size()];
 
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -196,13 +197,14 @@ public class QueryService extends BasicService {
 
         Query[] queryArray = new Query[queries.size()];
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -214,12 +216,12 @@ public class QueryService extends BasicService {
         }
 
         List<Query> queries = new ArrayList<Query>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
+            HConnection conn = HBaseConnection.get(hbaseUrl);
             HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
 
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = conn.getTable(userTableName);
             Get get = new Get(Bytes.toBytes(creator));
             get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
             Result result = htable.get(get);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index ab54882..07c7c6f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -144,16 +144,16 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void updateUser(UserDetails user) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Pair<byte[], byte[]> pair = userToHBaseRow(user);
             Put put = new Put(pair.getKey());
-
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+            put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
 
             htable.put(put);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -163,13 +163,14 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void deleteUser(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Delete delete = new Delete(Bytes.toBytes(username));
 
             htable.delete(delete);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -184,7 +185,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public boolean userExists(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -215,7 +216,7 @@ public class UserService implements UserDetailsManager {
         s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
 
         List<UserDetails> all = new ArrayList<UserDetails>();
-        Table htable = null;
+        HTableInterface htable = null;
         ResultScanner scanner = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 73f31c5..2351412 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.kylin.common.KylinConfig;
@@ -63,7 +63,7 @@ public class HBaseConnection {
     private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
 
     private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
-    private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
+    private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
     private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
 
     private static ExecutorService coprocessorPool = null;
@@ -74,7 +74,7 @@ public class HBaseConnection {
             public void run() {
                 closeCoprocessorPool();
 
-                for (Connection conn : connPool.values()) {
+                for (HConnection conn : connPool.values()) {
                     try {
                         conn.close();
                     } catch (IOException e) {
@@ -143,7 +143,7 @@ public class HBaseConnection {
         // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
         if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
             throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-
+        
         Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
         addHBaseClusterNNHAConfiguration(conf);
 
@@ -207,9 +207,9 @@ public class HBaseConnection {
 
     // ============================================================================
 
-    // returned Connection can be shared by multiple threads and does not require close()
+    // returned HConnection can be shared by multiple threads and does not require close()
     @SuppressWarnings("resource")
-    public static Connection get(String url) {
+    public static HConnection get(String url) {
         // find configuration
         Configuration conf = configCache.get(url);
         if (conf == null) {
@@ -217,13 +217,13 @@ public class HBaseConnection {
             configCache.put(url, conf);
         }
 
-        Connection connection = connPool.get(url);
+        HConnection connection = connPool.get(url);
         try {
             while (true) {
                 // I don't use DCL since recreate a connection is not a big issue.
                 if (connection == null || connection.isClosed()) {
                     logger.info("connection is null or closed, creating a new one");
-                    connection = ConnectionFactory.createConnection(conf);
+                    connection = HConnectionManager.createConnection(conf);
                     connPool.put(url, connection);
                 }
 
@@ -242,8 +242,8 @@ public class HBaseConnection {
         return connection;
     }
 
-    public static boolean tableExists(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
         try {
             return hbase.tableExists(TableName.valueOf(tableName));
         } finally {
@@ -263,18 +263,18 @@ public class HBaseConnection {
         deleteTable(HBaseConnection.get(hbaseUrl), tableName);
     }
 
-    public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
-        Admin hbase = conn.getAdmin();
-        TableName tableName = TableName.valueOf(table);
+    public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+
         try {
             if (tableExists(conn, table)) {
                 logger.debug("HTable '" + table + "' already exists");
-                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
+                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
                 boolean wait = false;
                 for (String family : families) {
                     if (existingFamilies.contains(family) == false) {
                         logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
-                        hbase.addColumn(tableName, newFamilyDescriptor(family));
+                        hbase.addColumn(table, newFamilyDescriptor(family));
                         // addColumn() is async, is there a way to wait it finish?
                         wait = true;
                     }
@@ -327,8 +327,8 @@ public class HBaseConnection {
         return fd;
     }
 
-    public static void deleteTable(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static void deleteTable(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             if (!tableExists(conn, tableName)) {
@@ -338,10 +338,10 @@ public class HBaseConnection {
 
             logger.debug("delete HTable '" + tableName + "'");
 
-            if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
-                hbase.disableTable(TableName.valueOf(tableName));
+            if (hbase.isTableEnabled(tableName)) {
+                hbase.disableTable(tableName);
             }
-            hbase.deleteTable(TableName.valueOf(tableName));
+            hbase.deleteTable(tableName);
 
             logger.debug("HTable '" + tableName + "' deleted");
         } finally {

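HBaseConnection is the pivot of the whole migration: the pooled 1.x Connection from ConnectionFactory becomes a pooled HConnection from HConnectionManager, and admin handles are obtained by wrapping that connection in new HBaseAdmin(conn) rather than calling conn.getAdmin(). A minimal sketch of the 0.98 lifecycle, assuming hbase-site.xml on the classpath and Kylin's default metadata table name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;

    public class AdminCheckSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = HBaseConfiguration.create();
            HConnection conn = HConnectionManager.createConnection(conf);
            HBaseAdmin admin = new HBaseAdmin(conn); // 0.98: admin wraps the shared connection
            try {
                System.out.println(admin.tableExists(TableName.valueOf("kylin_metadata")));
            } finally {
                admin.close(); // closes the admin handle only
                conn.close();  // we own this connection, so close it too
            }
        }
    }
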
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4127f47/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 74ab017..a44de4f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -30,15 +30,14 @@ import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +68,7 @@ public class HBaseResourceStore extends ResourceStore {
     final String tableNameBase;
     final String hbaseUrl;
 
-    Connection getConnection() throws IOException {
+    HConnection getConnection() throws IOException {
         return HBaseConnection.get(hbaseUrl);
     }
 
@@ -121,7 +120,7 @@ public class HBaseResourceStore extends ResourceStore {
         byte[] endRow = Bytes.toBytes(lookForPrefix);
         endRow[endRow.length - 1]++;
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         Scan scan = new Scan(startRow, endRow);
         if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
             scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -238,12 +237,13 @@ public class HBaseResourceStore extends ResourceStore {
         IOUtils.copy(content, bout);
         bout.close();
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
 
             table.put(put);
+            table.flushCommits();
         } finally {
             IOUtils.closeQuietly(table);
         }
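
The table.flushCommits() calls added in these hunks are the 0.98 way to push client-side buffered mutations out before the table handle is closed; in the 1.x API that buffering moved to BufferedMutator, so Table has no flushCommits(). A minimal sketch of the write-then-flush pattern, assuming 0.98 APIs and illustrative table/column names:

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class WriteThenFlush {
        public static void write(HConnection conn, byte[] row, byte[] value) throws IOException {
            HTableInterface table = conn.getTable("example_table"); // 0.98: plain String name
            try {
                Put put = new Put(row);
                put.add(Bytes.toBytes("f"), Bytes.toBytes("c"), value); // 0.98 uses add(), not addColumn()
                table.put(put);
                table.flushCommits(); // push any client-side buffered mutations before close
            } finally {
                IOUtils.closeQuietly(table);
            }
        }
    }
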
@@ -251,7 +251,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -264,6 +264,8 @@ public class HBaseResourceStore extends ResourceStore {
                 throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
+            table.flushCommits();
+
             return newTS;
         } finally {
             IOUtils.closeQuietly(table);
@@ -272,7 +274,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected void deleteResourceImpl(String resPath) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             boolean hdfsResourceExist = false;
             Result result = internalGetFromHTable(table, resPath, true, false);
@@ -285,6 +287,7 @@ public class HBaseResourceStore extends ResourceStore {
 
             Delete del = new Delete(Bytes.toBytes(resPath));
             table.delete(del);
+            table.flushCommits();
 
             if (hdfsResourceExist) { // remove hdfs cell value
                 Path redirectPath = bigCellHDFSPath(resPath);
@@ -305,7 +308,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
         } finally {
@@ -314,7 +317,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     }
 
-    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+    private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -333,7 +336,7 @@ public class HBaseResourceStore extends ResourceStore {
         return exists ? result : null;
     }
 
-    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
+    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
         Path redirectPath = bigCellHDFSPath(resPath);
         FileSystem fileSystem = HadoopUtil.getWorkingFileSystem();
 
@@ -358,7 +361,7 @@ public class HBaseResourceStore extends ResourceStore {
         return redirectPath;
     }
 
-    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
+    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
         int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
@@ -366,8 +369,8 @@ public class HBaseResourceStore extends ResourceStore {
         }
 
         Put put = new Put(row);
-        put.addColumn(B_FAMILY, B_COLUMN, content);
-        put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+        put.add(B_FAMILY, B_COLUMN, content);
+        put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
 
         return put;
     }
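
buildPut's size check guards against hbase.client.keyvalue.maxsize (default 10485760 bytes, i.e. 10 MB): a value over the limit cannot be stored as a single cell, so writeLargeCellToHdfs() spills the payload to HDFS and the cell keeps only a placeholder. A hedged sketch of that redirect idea, with an illustrative HDFS path layout:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BigCellRedirect {
        public static byte[] maybeRedirect(Configuration conf, String resPath, byte[] content)
                throws IOException {
            int kvSizeLimit = conf.getInt("hbase.client.keyvalue.maxsize", 10485760);
            if (content.length <= kvSizeLimit) {
                return content; // small enough to live in the HBase cell directly
            }
            Path redirect = new Path("/hbase-big-cells" + resPath); // illustrative layout
            FileSystem fs = FileSystem.get(conf);
            FSDataOutputStream out = fs.create(redirect, true);
            try {
                out.write(content); // the full payload lives in HDFS
            } finally {
                out.close();
            }
            return new byte[0]; // empty cell value marks "content redirected to HDFS"
        }
    }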