Posted to commits@kylin.apache.org by li...@apache.org on 2017/02/22 02:31:35 UTC

[01/13] kylin git commit: KYLIN-2417 Compare the performance between HDFSMetaStore and HBaseMetaStore [Forced Update!]

Repository: kylin
Updated Branches:
  refs/heads/master-hbase0.98 f5432f214 -> da9b080f9 (forced update)


KYLIN-2417 Compare the performance between HDFSMetaStore and HBaseMetaStore

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/65acd70e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/65acd70e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/65acd70e

Branch: refs/heads/master-hbase0.98
Commit: 65acd70ed4ff1a6f16a1ede95c1c9b85dc2f5578
Parents: 5d2e6c1
Author: xiefan46 <95...@qq.com>
Authored: Wed Jan 25 17:07:43 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 10:04:02 2017 +0800

----------------------------------------------------------------------
 .../common/persistence/ResourceStoreTest.java   | 42 +++++++++++++++++++-
 .../storage/hdfs/ITHDFSResourceStoreTest.java   | 19 +++++++++
 2 files changed, 60 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/65acd70e/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java b/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
index ddaf481..79a0c30 100644
--- a/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
+++ b/core-common/src/test/java/org/apache/kylin/common/persistence/ResourceStoreTest.java
@@ -30,17 +30,34 @@ import java.util.List;
 import java.util.NavigableSet;
 
 import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * Be called by LocalFileResourceStoreTest and ITHBaseResourceStoreTest.
+ * Be called by LocalFileResourceStoreTest, ITHBaseResourceStoreTest and ITHDFSResourceStoreTest.
  */
 public class ResourceStoreTest {
 
+    private static final Logger logger = LoggerFactory.getLogger(ResourceStoreTest.class);
+
+    private static final String PERFORMANCE_TEST_ROOT_PATH = "/performance";
+
+    private static final int TEST_RESOURCE_COUNT = 1000;
+
     public static void testAStore(ResourceStore store) throws IOException {
         testBasics(store);
         testGetAllResources(store);
     }
 
+    public static void testPerformance(ResourceStore store) throws IOException {
+        logger.info("Test basic functions");
+        testAStore(store);
+        logger.info("Basic function ok. Start to test performance for class : " + store.getClass());
+        logger.info("Write metadata time : " + testWritePerformance(store));
+        logger.info("Read metadata time  " + testReadPerformance(store));
+        logger.info("Performance test end. Class : " + store.getClass());
+    }
+
     private static void testGetAllResources(ResourceStore store) throws IOException {
         final String folder = "/testFolder";
         List<StringEntity> result;
@@ -144,6 +161,29 @@ public class ResourceStoreTest {
         assertTrue(list == null || list.contains(path2) == false);
     }
 
+    private static long testWritePerformance(ResourceStore store) throws IOException {
+        store.deleteResource(PERFORMANCE_TEST_ROOT_PATH);
+        StringEntity content = new StringEntity("something");
+        long startTime = System.currentTimeMillis();
+        for (int i = 0; i < TEST_RESOURCE_COUNT; i++) {
+            String resourcePath = PERFORMANCE_TEST_ROOT_PATH + "/res_" + i;
+            store.putResource(resourcePath, content, 0, StringEntity.serializer);
+        }
+        return System.currentTimeMillis() - startTime;
+    }
+
+    private static long testReadPerformance(ResourceStore store) throws IOException {
+        long startTime = System.currentTimeMillis();
+        int step = 0; //avoid compiler optimization
+        for (int i = 0; i < TEST_RESOURCE_COUNT; i++) {
+            String resourcePath = PERFORMANCE_TEST_ROOT_PATH + "/res_" + i;
+            StringEntity t = store.getResource(resourcePath, StringEntity.class, StringEntity.serializer);
+            step |= t.toString().length();
+        }
+        logger.info("step : " + step);
+        return System.currentTimeMillis() - startTime;
+    }
+
     @SuppressWarnings("serial")
     public static class StringEntity extends RootPersistentEntity {
 

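The harness above needs nothing but a concrete ResourceStore, so any store-specific test can reuse it; the step |= t.toString().length() accumulator in testReadPerformance exists only so the JIT cannot treat the read loop as dead code. A minimal sketch of a standalone runner (the ResourceStore.getStore(KylinConfig) factory is assumed here; it is not part of this diff):

    import java.io.IOException;

    import org.apache.kylin.common.KylinConfig;
    import org.apache.kylin.common.persistence.ResourceStore;
    import org.apache.kylin.common.persistence.ResourceStoreTest;

    public class ResourceStorePerfRunner {
        public static void main(String[] args) throws IOException {
            KylinConfig config = KylinConfig.getInstanceFromEnv();
            // Any concrete store works here: local file, HDFS or HBase.
            ResourceStore store = ResourceStore.getStore(config); // assumed factory
            // Logs write/read time in millis for 1000 small resources.
            ResourceStoreTest.testPerformance(store);
        }
    }
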
http://git-wip-us.apache.org/repos/asf/kylin/blob/65acd70e/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
index aa5a104..534839f 100644
--- a/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/storage/hdfs/ITHDFSResourceStoreTest.java
@@ -27,8 +27,10 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStoreTest;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
 import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.storage.hbase.HBaseResourceStore;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
@@ -84,4 +86,21 @@ public class ITHDFSResourceStoreTest extends HBaseMetadataTestCase {
         assertTrue(fs.exists(new Path(path)));
     }
 
+    @Ignore
+    @Test
+    public void performanceTest() throws Exception{
+        //test hdfs performance
+        String oldUrl = kylinConfig.getMetadataUrl();
+        kylinConfig.setProperty("kylin.metadata.url", "kylin_metadata@hdfs");
+        HDFSResourceStore store = new HDFSResourceStore(kylinConfig);
+        ResourceStoreTest.testPerformance(store);
+        kylinConfig.setProperty("kylin.metadata.url", oldUrl);
+
+        //test hbase
+        oldUrl = kylinConfig.getMetadataUrl();
+        kylinConfig.setProperty("kylin.metadata.url", "kylin_metadata@hbase");
+        HBaseResourceStore store2 = new HBaseResourceStore(kylinConfig);
+        ResourceStoreTest.testPerformance(store2);
+        kylinConfig.setProperty("kylin.metadata.url", oldUrl);
+    }
 }

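Note that the restore of kylin.metadata.url happens only if testPerformance returns normally; if the benchmark throws, the property stays pointed at the previous store for the rest of the JVM. A try/finally variant of the same steps, using only the APIs shown above, makes the restore unconditional:

    String oldUrl = kylinConfig.getMetadataUrl();
    kylinConfig.setProperty("kylin.metadata.url", "kylin_metadata@hdfs");
    try {
        ResourceStoreTest.testPerformance(new HDFSResourceStore(kylinConfig));
    } finally {
        kylinConfig.setProperty("kylin.metadata.url", oldUrl); // always restore
    }
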

[12/13] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index b12173d..51b21b3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,13 +26,12 @@ import java.util.NoSuchElementException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.BufferedMutator;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.kv.RowConstants;
@@ -87,13 +86,14 @@ public class SimpleHBaseStore implements IGTStore {
     }
 
     private class Writer implements IGTWriter {
-        final BufferedMutator table;
+        final HTableInterface table;
         final ByteBuffer rowkey = ByteBuffer.allocate(50);
         final ByteBuffer value = ByteBuffer.allocate(50);
 
         Writer() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            table = conn.getBufferedMutator(htableName);
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            table = conn.getTable(htableName);
+            table.setAutoFlush(false, true);
         }
 
         @Override
@@ -113,24 +113,24 @@ public class SimpleHBaseStore implements IGTStore {
 
             Put put = new Put(rowkey);
             put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
-            table.mutate(put);
+            table.put(put);
         }
 
         @Override
         public void close() throws IOException {
-            table.flush();
+            table.flushCommits();
             table.close();
         }
     }
 
     class Reader implements IGTScanner {
-        final Table table;
+        final HTableInterface table;
         final ResultScanner scanner;
 
         int count = 0;
 
         Reader() throws IOException {
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+            HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
             table = conn.getTable(htableName);
 
             Scan scan = new Scan();

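This file shows the core write-path translation of the whole back-port: HBase 1.x batches client-side writes through a BufferedMutator, while 0.98 gets the same buffering from an HTableInterface with auto-flush turned off and an explicit flushCommits(). The two idioms, condensed from the hunks above:

    // HBase 1.x (master branch):
    //   BufferedMutator table = conn.getBufferedMutator(htableName);
    //   table.mutate(put);   // buffered
    //   table.flush();       // push buffered mutations
    //   table.close();

    // HBase 0.98 (this branch):
    HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
    HTableInterface table = conn.getTable(htableName);
    table.setAutoFlush(false, true);  // buffer puts client-side; clear the buffer on flush
    table.put(put);                   // buffered until flushCommits()
    table.flushCommits();             // push the buffered puts to the region servers
    table.close();
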
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 82b67b6..1cd6694 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -26,9 +26,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.zip.DataFormatException;
 
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -52,10 +51,10 @@ import org.apache.kylin.storage.gtrecord.StorageResponseGTScatter;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -117,7 +116,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
 
         // globally shared connection, does not require close
-        final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
 
         final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -172,7 +171,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                     final AtomicReference<RuntimeException> regionErrorHolder = new AtomicReference<>();
 
                     try {
-                        Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
+                        HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
 
                         final CubeVisitRequest request = builder.build();
                         final byte[] startKey = epRange.getFirst();

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index 33f8d90..a24d517 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -25,12 +25,11 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.common.util.ShardingHash;
@@ -132,8 +131,8 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
         // primary key (also the 0th column block) is always selected
         final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
         // globally shared connection, does not require close
-        Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
-        final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
+        HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+        final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
 
         List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
         List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index cde127e..1e550f9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -195,7 +195,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         if (shardLength == 0) {
             return;
         }
-        byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
+        byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
         Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
         Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
     }
@@ -234,7 +234,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
         try (SetThreadName ignored = new SetThreadName("Query %s", queryId)) {
             this.serviceStartTime = System.currentTimeMillis();
 
-            region = (HRegion)env.getRegion();
+            region = env.getRegion();
             region.startRegionOperation();
 
             // if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.

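The coprocessor side changes for the same reason: 0.98's RegionCoprocessorEnvironment.getRegion() returns an HRegion directly, with no cast through the 1.x Region interface, and HRegion still exposes getStartKey() without the getRegionInfo() indirection. Side by side, from the hunks above:

    // HBase 1.x (master): the region hides behind the Region interface
    //   region = (HRegion) env.getRegion();
    //   byte[] regionStartKey = region.getRegionInfo().getStartKey();

    // HBase 0.98 (this branch): getRegion() returns HRegion as-is
    region = env.getRegion();
    byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey())
            ? new byte[shardLength]   // the first region of a table has an empty start key
            : region.getStartKey();
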
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index feb4842..2814ad6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -26,8 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -80,8 +79,7 @@ public class CubeHTableUtil {
         tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin admin = conn.getAdmin();
+        HBaseAdmin admin = new HBaseAdmin(conf);
 
         try {
             if (User.isHBaseSecurityEnabled(conf)) {
@@ -94,7 +92,7 @@ public class CubeHTableUtil {
                 tableDesc.addFamily(cf);
             }
 
-            if (admin.tableExists(TableName.valueOf(tableName))) {
+            if (admin.tableExists(tableName)) {
                 // admin.disableTable(tableName);
                 // admin.deleteTable(tableName);
                 throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -103,7 +101,7 @@ public class CubeHTableUtil {
             DeployCoprocessorCLI.deployCoprocessor(tableDesc);
 
             admin.createTable(tableDesc, splitKeys);
-            Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
+            Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
             logger.info("create hbase table " + tableName + " done.");
         } finally {
             IOUtils.closeQuietly(admin);
@@ -112,7 +110,8 @@ public class CubeHTableUtil {
     }
 
     public static void deleteHTable(TableName tableName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);
@@ -127,7 +126,8 @@ public class CubeHTableUtil {
 
     /** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
     public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
-        Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin admin = new HBaseAdmin(conf);
         try {
             if (admin.tableExists(tableName)) {
                 logger.info("disabling hbase table " + tableName);

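Table creation keeps the same shape in both APIs; what changes is how the admin handle is obtained (built from a Configuration rather than borrowed from a shared Connection) and that 0.98 accepts plain String table names. The 0.98 sequence, distilled from the hunk above:

    HBaseAdmin admin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
    try {
        if (admin.tableExists(tableName))            // String overload in 0.98
            throw new RuntimeException("HBase table " + tableName + " exists!");
        DeployCoprocessorCLI.deployCoprocessor(tableDesc);
        admin.createTable(tableDesc, splitKeys);     // pre-split regions
        Preconditions.checkArgument(admin.isTableAvailable(tableName),
                "table " + tableName + " created, but is not available due to some reasons");
    } finally {
        IOUtils.closeQuietly(admin);                 // HBaseAdmin is Closeable
    }
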
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index df3cf08..eacff9f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -25,13 +25,13 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.HiveCmdBuilder;
 import org.apache.kylin.job.exception.ExecuteException;
@@ -100,21 +100,19 @@ public class DeprecatedGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped HBase table " + table);
                             output.append("Dropped HBase table " + table + " \n");
                         } else {

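The GC steps apply the mirror-image pattern for dropping tables. The one extra wrinkle is that 0.98's HBaseAdmin.getTableDescriptor takes a byte[] name, which is why the hunk adds the org.apache.kylin.common.util.Bytes import:

    HBaseAdmin admin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
    try {
        if (admin.tableExists(table)) {                           // String name in 0.98
            HTableDescriptor desc = admin.getTableDescriptor(Bytes.toBytes(table));
            String host = desc.getValue(IRealizationConstants.HTableTag);
            if (metadataUrlPrefix.equalsIgnoreCase(host)) {
                if (admin.isTableEnabled(table))
                    admin.disableTable(table);                    // must disable before delete
                admin.deleteTable(table);
            }
        }
    } finally {
        IOUtils.closeQuietly(admin);
    }
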
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index 6587d4e..d5b36df 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -23,8 +23,8 @@ import java.util.List;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.cuboid.Cuboid;
@@ -49,7 +49,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
 
     private final List<KeyValueCreator> keyValueCreators;
     private final int nColumns;
-    private final Table hTable;
+    private final HTableInterface hTable;
     private final CubeDesc cubeDesc;
     private final CubeSegment cubeSegment;
     private final Object[] measureValues;
@@ -58,7 +58,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
     private AbstractRowKeyEncoder rowKeyEncoder;
     private byte[] keybuf;
 
-    public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
+    public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
         this.keyValueCreators = Lists.newArrayList();
         this.cubeSegment = segment;
         this.cubeDesc = cubeSegment.getCubeDesc();
@@ -117,6 +117,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
             long t = System.currentTimeMillis();
             if (hTable != null) {
                 hTable.put(puts);
+                hTable.flushCommits();
             }
             logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
             puts.clear();

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 2f7e164..5b2441c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,20 +69,19 @@ public class MergeGCStep extends AbstractExecutable {
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {
             String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
-            Admin admin = null;
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin admin = null;
             try {
-                Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-                admin = conn.getAdmin();
-
+                admin = new HBaseAdmin(conf);
                 for (String table : oldTables) {
-                    if (admin.tableExists(TableName.valueOf(table))) {
-                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
+                    if (admin.tableExists(table)) {
+                        HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
                         String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
                         if (metadataUrlPrefix.equalsIgnoreCase(host)) {
-                            if (admin.isTableEnabled(TableName.valueOf(table))) {
-                                admin.disableTable(TableName.valueOf(table));
+                            if (admin.isTableEnabled(table)) {
+                                admin.disableTable(table);
                             }
-                            admin.deleteTable(TableName.valueOf(table));
+                            admin.deleteTable(table);
                             logger.debug("Dropped htable: " + table);
                             output.append("HBase table " + table + " is dropped. \n");
                         } else {

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 56f867a..a150607 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,11 +21,9 @@ package org.apache.kylin.storage.hbase.util;
 import java.io.IOException;
 
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -40,8 +38,8 @@ public class CleanHtableCLI extends AbstractApplication {
     protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
 
     private void clean() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
 
         for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
             String name = descriptor.getNameAsString().toLowerCase();
@@ -52,7 +50,7 @@ public class CleanHtableCLI extends AbstractApplication {
                 System.out.println();
 
                 descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
-                hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
+                hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
             }
         }
         hbaseAdmin.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 581de38..68c0a39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -26,19 +26,19 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -89,7 +89,7 @@ public class CubeMigrationCLI {
     private static ResourceStore srcStore;
     private static ResourceStore dstStore;
     private static FileSystem hdfsFS;
-    private static Admin hbaseAdmin;
+    private static HBaseAdmin hbaseAdmin;
 
     public static final String ACL_INFO_FAMILY = "i";
     private static final String ACL_TABLE_NAME = "_acl";
@@ -134,8 +134,8 @@ public class CubeMigrationCLI {
 
         checkAndGetHbaseUrl();
 
-        Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
 
         hdfsFS = HadoopUtil.getWorkingFileSystem();
 
@@ -233,7 +233,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -327,8 +326,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -450,11 +449,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -474,6 +473,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -504,8 +504,8 @@ public class CubeMigrationCLI {
 
         switch (opt.type) {
         case CHANGE_HTABLE_HOST: {
-            TableName tableName = TableName.valueOf((String) opt.params[0]);
-            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
             hbaseAdmin.disableTable(tableName);
             desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
             hbaseAdmin.modifyTable(tableName, desc);
@@ -539,12 +539,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -561,7 +562,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 20d0f7d..8bd4abf 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.cube.CubeInstance;
@@ -61,7 +61,7 @@ public class CubeMigrationCheckCLI {
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
 
     private List<String> issueExistHTables;
     private List<String> inconsistentHTables;
@@ -130,8 +130,9 @@ public class CubeMigrationCheckCLI {
         this.dstCfg = kylinConfig;
         this.ifFix = isFix;
 
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        hbaseAdmin = new HBaseAdmin(conf);
+
         issueExistHTables = Lists.newArrayList();
         inconsistentHTables = Lists.newArrayList();
     }
@@ -188,10 +189,10 @@ public class CubeMigrationCheckCLI {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
                 logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
-                hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
+                hbaseAdmin.modifyTable(sepNameList[0], desc);
+                hbaseAdmin.enableTable(sepNameList[0]);
             }
         } else {
             logger.info("------ Inconsistent HTables Needed To Be Fixed ------");

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 29c738e..c9e969f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -44,8 +44,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinVersion;
@@ -86,8 +85,7 @@ public class DeployCoprocessorCLI {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
         FileSystem fileSystem = FileSystem.get(hconf);
-        Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf);
 
         String localCoprocessorJar;
         if ("default".equals(args[0])) {
@@ -205,10 +203,10 @@ public class DeployCoprocessorCLI {
     public static void deployCoprocessor(HTableDescriptor tableDesc) {
         try {
             initHTableCoprocessor(tableDesc);
-            logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
+            logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
 
         } catch (Exception ex) {
-            logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
+            logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
             logger.error("Will try creating the table without coprocessor.");
         }
     }
@@ -229,7 +227,7 @@ public class DeployCoprocessorCLI {
         desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
     }
 
-    public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
+    public static boolean resetCoprocessor(String tableName, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
         KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
@@ -244,7 +242,7 @@ public class DeployCoprocessorCLI {
         logger.info("reset coprocessor on " + tableName);
 
         logger.info("Disable " + tableName);
-        hbaseAdmin.disableTable(TableName.valueOf(tableName));
+        hbaseAdmin.disableTable(tableName);
 
         while (desc.hasCoprocessor(CubeObserverClassOld2)) {
             desc.removeCoprocessor(CubeObserverClassOld2);
@@ -270,15 +268,16 @@ public class DeployCoprocessorCLI {
             desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
         }
 
-        hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+        hbaseAdmin.modifyTable(tableName, desc);
 
         logger.info("Enable " + tableName);
-        hbaseAdmin.enableTable(TableName.valueOf(tableName));
+        hbaseAdmin.enableTable(tableName);
 
         return true;
     }
 
-    private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
+
+    private static List<String> resetCoprocessorOnHTables(final HBaseAdmin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
         List<String> processedTables = Collections.synchronizedList(new ArrayList<String>());
         ExecutorService coprocessorPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
         CountDownLatch countDownLatch = new CountDownLatch(tableNames.size());
@@ -299,12 +298,12 @@ public class DeployCoprocessorCLI {
 
     private static class ResetCoprocessorWorker implements Runnable {
         private final CountDownLatch countDownLatch;
-        private final Admin hbaseAdmin;
+        private final HBaseAdmin hbaseAdmin;
         private final Path hdfsCoprocessorJar;
         private final String tableName;
         private final List<String> processedTables;
 
-        public ResetCoprocessorWorker(CountDownLatch countDownLatch, Admin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
+        public ResetCoprocessorWorker(CountDownLatch countDownLatch, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar, String tableName, List<String> processedTables) {
             this.countDownLatch = countDownLatch;
             this.hbaseAdmin = hbaseAdmin;
             this.hdfsCoprocessorJar = hdfsCoprocessorJar;
@@ -425,7 +424,7 @@ public class DeployCoprocessorCLI {
         return coprocessorDir;
     }
 
-    private static Set<String> getCoprocessorJarPaths(Admin hbaseAdmin, List<String> tableNames) throws IOException {
+    private static Set<String> getCoprocessorJarPaths(HBaseAdmin hbaseAdmin, List<String> tableNames) throws IOException {
         HashSet<String> result = new HashSet<String>();
 
         for (String tableName : tableNames) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index 1cdb2f8..61c73d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -236,9 +235,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -258,6 +257,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index dd5f8fa..86ba22f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public class GridTableHBaseBenchmark {
         System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
         String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
 
-        Connection conn = HBaseConnection.get(hbaseUrl);
+        HConnection conn = HBaseConnection.get(hbaseUrl);
         createHTableIfNeeded(conn, TEST_TABLE);
         prepareData(conn);
 
@@ -91,10 +91,10 @@ public class GridTableHBaseBenchmark {
 
     }
 
-    private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+    private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
         Stats stats = new Stats("COLUMN_SCAN");
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -122,20 +122,20 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
         fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
     }
 
-    private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
     }
 
-    private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
+    private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
         jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
     }
 
-    private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
             stats.markStart();
 
@@ -156,11 +156,11 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+    private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
 
         final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
 
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+        HTableInterface table = conn.getTable(TEST_TABLE);
         try {
 
             stats.markStart();
@@ -204,8 +204,8 @@ public class GridTableHBaseBenchmark {
         }
     }
 
-    private static void prepareData(Connection conn) throws IOException {
-        Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
+    private static void prepareData(HConnection conn) throws IOException {
+        HTableInterface table = conn.getTable(TEST_TABLE);
 
         try {
             // check how many rows existing
@@ -258,8 +258,8 @@ public class GridTableHBaseBenchmark {
         return bytes;
     }
 
-    private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             boolean tableExist = false;

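One more acquisition variant appears here: besides the Configuration-based constructor used elsewhere in this commit, 0.98's HBaseAdmin can also be built straight from an HConnection, which the benchmark uses in place of the 1.x conn.getAdmin():

    // HBase 1.x (master):  Admin hbase = conn.getAdmin();
    // HBase 0.98 (this branch):
    HBaseAdmin hbase = new HBaseAdmin(conn);   // reuses the shared HConnection
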
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 940d64a..6749d6c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,11 +24,9 @@ import java.util.List;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -57,8 +55,8 @@ public class HBaseClean extends AbstractApplication {
     private void cleanUp() {
         try {
             // get all kylin hbase tables
-            Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-            Admin hbaseAdmin = conn.getAdmin();
+            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+            HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
             String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
             HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
             List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -73,12 +71,12 @@ public class HBaseClean extends AbstractApplication {
                 // drop tables
                 for (String htableName : allTablesNeedToBeDropped) {
                     logger.info("Deleting HBase table " + htableName);
-                    if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                        if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                            hbaseAdmin.disableTable(TableName.valueOf(htableName));
+                    if (hbaseAdmin.tableExists(htableName)) {
+                        if (hbaseAdmin.isTableEnabled(htableName)) {
+                            hbaseAdmin.disableTable(htableName);
                         }
 
-                        hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                        hbaseAdmin.deleteTable(htableName);
                         logger.info("Deleted HBase table " + htableName);
                     } else {
                         logger.info("HBase table" + htableName + " does not exist");

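The cleanup tools follow the same backport pattern: an HBaseAdmin built straight from a Configuration, with tables addressed by String instead of TableName. A self-contained sketch of the list/disable/delete loop, assuming a hypothetical table-name prefix:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class DropByPrefixSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf); // 0.98-era constructor
            try {
                // listTables() takes a regex; "KYLIN_.*" is a hypothetical prefix
                for (HTableDescriptor desc : admin.listTables("KYLIN_.*")) {
                    String name = desc.getNameAsString();
                    if (admin.isTableEnabled(name)) {
                        admin.disableTable(name); // a table must be disabled before deletion
                    }
                    admin.deleteTable(name);
                }
            } finally {
                admin.close();
            }
        }
    }
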
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 1daca0a..937b65f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
@@ -32,15 +31,12 @@ import java.util.TreeSet;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.slf4j.Logger;
@@ -62,31 +58,30 @@ public class HBaseRegionSizeCalculator {
     /**
      * Computes size of each region for table and given column families.
      * */
-    public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
+    public HBaseRegionSizeCalculator(HTable table) throws IOException {
+        this(table, new HBaseAdmin(table.getConfiguration()));
+    }
 
-        Table table = null;
-        Admin admin = null;
-        try {
-            table = hbaseConnection.getTable(TableName.valueOf(tableName));
-            admin = hbaseConnection.getAdmin();
+    /** Constructor for unit testing */
+    HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
 
+        try {
             if (!enabled(table.getConfiguration())) {
                 logger.info("Region size calculation disabled.");
                 return;
             }
 
-            logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
+            logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
 
             // Get regions for table.
-            RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
-            List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
+            Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
             Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
 
-            for (HRegionLocation hRegionLocation : regionLocationList) {
-                tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
+            for (HRegionInfo regionInfo : tableRegionInfos) {
+                tableRegions.add(regionInfo.getRegionName());
             }
 
-            ClusterStatus clusterStatus = admin.getClusterStatus();
+            ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
             Collection<ServerName> servers = clusterStatus.getServers();
             final long megaByte = 1024L * 1024L;
 
@@ -110,7 +105,7 @@ public class HBaseRegionSizeCalculator {
                 }
             }
         } finally {
-            IOUtils.closeQuietly(admin);
+            IOUtils.closeQuietly(hBaseAdmin);
         }
 
     }

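After this change the calculator no longer walks a RegionLocator; it collects per-region store-file sizes from the cluster status instead. A condensed, standalone sketch of that aggregation (the Kylin-specific region filtering is left out):

    import java.io.IOException;
    import java.util.Map;
    import java.util.TreeMap;
    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.RegionLoad;
    import org.apache.hadoop.hbase.ServerLoad;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RegionSizeSketch {
        static Map<byte[], Long> regionSizes(HBaseAdmin admin) throws IOException {
            Map<byte[], Long> sizes = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
            final long megaByte = 1024L * 1024L;
            ClusterStatus status = admin.getClusterStatus();
            for (ServerName server : status.getServers()) {
                ServerLoad load = status.getLoad(server);
                // one RegionLoad per region hosted by this server
                for (RegionLoad region : load.getRegionsLoad().values()) {
                    sizes.put(region.getName(), region.getStorefileSizeMB() * megaByte);
                }
            }
            return sizes;
        }
    }
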
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index a2f60d4..266f7e7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,10 +23,9 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 
@@ -43,8 +42,8 @@ public class HBaseUsage {
         Map<String, List<String>> envs = Maps.newHashMap();
 
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         for (HTableDescriptor desc : tableDescriptors) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index 8dd2164..1db60fb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -32,15 +32,15 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -58,11 +58,11 @@ public class HbaseStreamingInput {
     private static final byte[] QN = "C".getBytes();
 
     public static void createTable(String tableName) throws IOException {
-        Connection conn = getConnection();
-        Admin hadmin = conn.getAdmin();
+        HConnection conn = getConnection();
+        HBaseAdmin hadmin = new HBaseAdmin(conn);
 
         try {
-            boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
+            boolean tableExist = hadmin.tableExists(tableName);
             if (tableExist) {
                 logger.info("HTable '" + tableName + "' already exists");
                 return;
@@ -120,8 +120,8 @@ public class HbaseStreamingInput {
                 e.printStackTrace();
             }
 
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             byte[] key = new byte[8 + 4];//time + id
 
@@ -136,7 +136,7 @@ public class HbaseStreamingInput {
                 Bytes.putInt(key, 8, i);
                 Put put = new Put(key);
                 byte[] cell = randomBytes(CELL_SIZE);
-                put.addColumn(CF, QN, cell);
+                put.add(CF, QN, cell);
                 buffer.add(put);
             }
             table.put(buffer);
@@ -172,8 +172,8 @@ public class HbaseStreamingInput {
             }
 
             Random r = new Random();
-            Connection conn = getConnection();
-            Table table = conn.getTable(TableName.valueOf(tableName));
+            HConnection conn = getConnection();
+            HTableInterface table = conn.getTable(tableName);
 
             long leftBound = getFirstKeyTime(table);
             long rightBound = System.currentTimeMillis();
@@ -208,7 +208,7 @@ public class HbaseStreamingInput {
         }
     }
 
-    private static long getFirstKeyTime(Table table) throws IOException {
+    private static long getFirstKeyTime(HTableInterface table) throws IOException {
         long startTime = 0;
 
         Scan scan = new Scan();
@@ -226,8 +226,8 @@ public class HbaseStreamingInput {
 
     }
 
-    private static Connection getConnection() throws IOException {
-        return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+    private static HConnection getConnection() throws IOException {
+        return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
     }
 
     private static String formatTime(long time) {

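Besides the connection types, note the one-method rename in this hunk: HBase 0.98 spells the cell mutation Put.add(family, qualifier, value), which later versions renamed to addColumn. A tiny sketch with hypothetical family and qualifier:

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PutSketch {
        public static void main(String[] args) {
            byte[] family = Bytes.toBytes("F");    // hypothetical column family
            byte[] qualifier = Bytes.toBytes("C"); // hypothetical qualifier
            Put put = new Put(Bytes.toBytes("row-0001"));
            put.add(family, qualifier, Bytes.toBytes("value")); // 0.98 name for addColumn
        }
    }
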
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ea05ab2..ca1a060 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,11 +23,10 @@ import java.io.IOException;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -51,8 +50,8 @@ public class HtableAlterMetadataCLI extends AbstractApplication {
     String metadataValue;
 
     private void alter() throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
 
         hbaseAdmin.disableTable(table.getTableName());

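Altering table metadata follows the classic disable/modify/enable cycle; on 0.98 the admin handle again comes straight from a Configuration. A rough sketch (the table name and metadata key/value are made up):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class AlterMetadataSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            try {
                String tableName = "KYLIN_EXAMPLE"; // hypothetical table
                HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf(tableName));
                desc.setValue("KYLIN_HOST", "example"); // hypothetical metadata key/value
                admin.disableTable(tableName);  // HBase requires disable before modifyTable
                admin.modifyTable(tableName, desc);
                admin.enableTable(tableName);
            } finally {
                admin.close();
            }
        }
    }
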
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index df4e912..8ff5b0f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,14 +30,10 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.kylin.common.KylinConfig;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,9 +52,9 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
     Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+
         // get all kylin hbase tables
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -77,13 +73,12 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
             // drop tables
             for (String htableName : allTablesNeedToBeDropped) {
                 logger.info("Deleting HBase table " + htableName);
-                TableName tableName = TableName.valueOf(htableName);
-                if (hbaseAdmin.tableExists(tableName)) {
-                    if (hbaseAdmin.isTableEnabled(tableName)) {
-                        hbaseAdmin.disableTable(tableName);
+                if (hbaseAdmin.tableExists(htableName)) {
+                    if (hbaseAdmin.isTableEnabled(htableName)) {
+                        hbaseAdmin.disableTable(htableName);
                     }
 
-                    hbaseAdmin.deleteTable(tableName);
+                    hbaseAdmin.deleteTable(htableName);
                     logger.info("Deleted HBase table " + htableName);
                 } else {
                     logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index bba6745..1ea8e8d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -22,13 +22,12 @@ import java.io.IOException;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -60,12 +59,12 @@ public class PingHBaseCLI {
         Scan scan = new Scan();
         int limit = 20;
 
-        Connection conn = null;
-        Table table = null;
+        HConnection conn = null;
+        HTableInterface table = null;
         ResultScanner scanner = null;
         try {
-            conn = ConnectionFactory.createConnection(hconf);
-            table = conn.getTable(TableName.valueOf(hbaseTable));
+            conn = HConnectionManager.createConnection(hconf);
+            table = conn.getTable(hbaseTable);
             scanner = table.getScanner(scan);
             int count = 0;
             for (Result r : scanner) {

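The scan path is unchanged apart from the types; ResultScanner, HTableInterface and HConnection are all Closeable, so the existing closeQuietly cleanup still applies. A standalone sketch of the ping-style scan under those assumptions (table name hypothetical):

    import java.io.IOException;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HConnectionManager;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;

    public class ScanSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            HConnection conn = null;
            HTableInterface table = null;
            ResultScanner scanner = null;
            try {
                conn = HConnectionManager.createConnection(conf);
                table = conn.getTable("KYLIN_PING_TABLE"); // hypothetical table
                scanner = table.getScanner(new Scan());
                int count = 0;
                for (Result r : scanner) {
                    if (++count >= 20) // cap the scan, as the CLI above does
                        break;
                }
                System.out.println("scanned " + count + " rows");
            } finally {
                IOUtils.closeQuietly(scanner);
                IOUtils.closeQuietly(table);
                IOUtils.closeQuietly(conn);
            }
        }
    }
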
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index db516bb..01edb1f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,12 +22,11 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.BytesUtil;
 import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -71,8 +70,8 @@ public class RowCounterCLI {
 
         logger.info("My Scan " + scan.toString());
 
-        Connection conn = ConnectionFactory.createConnection(conf);
-        Table tableInterface = conn.getTable(TableName.valueOf(htableName));
+        HConnection conn = HConnectionManager.createConnection(conf);
+        HTableInterface tableInterface = conn.getTable(htableName);
 
         Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
         int counter = 0;

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index f6b65ab..23e7e10 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,9 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
@@ -59,7 +57,6 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableManager;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,8 +77,7 @@ public class StorageCleanupJob extends AbstractApplication {
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         // get all kylin hbase tables
-        Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
-        Admin hbaseAdmin = conn.getAdmin();
+        HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
         HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -157,22 +153,22 @@ public class StorageCleanupJob extends AbstractApplication {
     }
 
     class DeleteHTableRunnable implements Callable {
-        Admin hbaseAdmin;
+        HBaseAdmin hbaseAdmin;
         String htableName;
 
-        DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
+        DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
             this.hbaseAdmin = hbaseAdmin;
             this.htableName = htableName;
         }
 
         public Object call() throws Exception {
             logger.info("Deleting HBase table " + htableName);
-            if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
-                if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
-                    hbaseAdmin.disableTable(TableName.valueOf(htableName));
+            if (hbaseAdmin.tableExists(htableName)) {
+                if (hbaseAdmin.isTableEnabled(htableName)) {
+                    hbaseAdmin.disableTable(htableName);
                 }
 
-                hbaseAdmin.deleteTable(TableName.valueOf(htableName));
+                hbaseAdmin.deleteTable(htableName);
                 logger.info("Deleted HBase table " + htableName);
             } else {
                 logger.info("HBase table" + htableName + " does not exist");

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 42a54c8..e36f662 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,18 +24,16 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,15 +49,14 @@ public class UpdateHTableHostCLI {
     private List<String> errorMsgs = Lists.newArrayList();
 
     private List<String> htables;
-    private Admin hbaseAdmin;
+    private HBaseAdmin hbaseAdmin;
     private KylinConfig kylinConfig;
     private String oldHostValue;
 
     public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
         this.htables = htables;
         this.oldHostValue = oldHostValue;
-        Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
-        hbaseAdmin = conn.getAdmin();
+        this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
         this.kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
@@ -169,9 +166,9 @@ public class UpdateHTableHostCLI {
         HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
         if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
             desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
-            hbaseAdmin.disableTable(TableName.valueOf(tableName));
-            hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
-            hbaseAdmin.enableTable(TableName.valueOf(tableName));
+            hbaseAdmin.disableTable(tableName);
+            hbaseAdmin.modifyTable(tableName, desc);
+            hbaseAdmin.enableTable(tableName);
 
             updatedResources.add(tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/tool/pom.xml
----------------------------------------------------------------------
diff --git a/tool/pom.xml b/tool/pom.xml
index 278c2b8..072ee44 100644
--- a/tool/pom.xml
+++ b/tool/pom.xml
@@ -60,16 +60,6 @@
             <artifactId>hbase-client</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
 
         <!-- Env & Test -->
         <dependency>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index c0042f3..c8bff89 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -231,7 +231,6 @@ public class CubeMigrationCLI {
             operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
         }
     }
-
     private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
@@ -448,11 +447,11 @@ public class CubeMigrationCLI {
             Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
             ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
             String projUUID = project.getUuid();
-            Table srcAclHtable = null;
-            Table destAclHtable = null;
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -472,6 +471,7 @@ public class CubeMigrationCLI {
                         destAclHtable.put(put);
                     }
                 }
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(srcAclHtable);
                 IOUtils.closeQuietly(destAclHtable);
@@ -537,12 +537,13 @@ public class CubeMigrationCLI {
         case COPY_ACL: {
             String cubeId = (String) opt.params[0];
             String modelId = (String) opt.params[1];
-            Table destAclHtable = null;
+            HTableInterface destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
             } finally {
                 IOUtils.closeQuietly(destAclHtable);
             }
@@ -559,7 +560,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private static void updateMeta(KylinConfig config) {
+    private static void updateMeta(KylinConfig config){
         String[] nodes = config.getRestServers();
         for (String node : nodes) {
             RestClient restClient = new RestClient(node);

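The added flushCommits() calls are the behavioral part of this hunk: a 0.98 HTableInterface may buffer puts and deletes client-side, so flushing before the table is closed guarantees the mutations actually reach the server. A sketch of the write path, assuming a caller-supplied ACL table and hypothetical family/qualifier:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FlushCommitsSketch {
        static void writeAcl(HTableInterface aclTable, byte[] rowKey) throws IOException {
            Put put = new Put(rowKey);
            put.add(Bytes.toBytes("i"), Bytes.toBytes("c"), Bytes.toBytes("acl")); // hypothetical cells
            aclTable.put(put);
            aclTable.flushCommits(); // push any client-side write buffer before close
        }
    }
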
http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index f52fc3e..19e5db0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -25,11 +25,10 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -232,9 +231,9 @@ public class ExtendCubeToHybridCLI {
         Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
         ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
         String projUUID = project.getUuid();
-        Table aclHtable = null;
+        HTableInterface aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -254,6 +253,7 @@ public class ExtendCubeToHybridCLI {
                     aclHtable.put(put);
                 }
             }
+            aclHtable.flushCommits();
         } finally {
             IOUtils.closeQuietly(aclHtable);
         }


[03/13] kylin git commit: KYLIN-2384 fix compile with Kryo registration

Posted by li...@apache.org.
KYLIN-2384 fix compile with Kryo registration


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e77a848e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e77a848e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e77a848e

Branch: refs/heads/master-hbase0.98
Commit: e77a848e343f2e1f9a9382e336a48c85de25212a
Parents: 4dce0cf
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 10:25:52 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 10:25:52 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/engine/spark/KylinKryoRegistrator.java | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/e77a848e/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
index 20b39ee..daa1053 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/KylinKryoRegistrator.java
@@ -18,13 +18,14 @@
 
 package org.apache.kylin.engine.spark;
 
-import com.esotericsoftware.kryo.Kryo;
+import java.util.LinkedHashSet;
+import java.util.Set;
+
 import org.apache.spark.serializer.KryoRegistrator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.LinkedHashSet;
-import java.util.Set;
+import com.esotericsoftware.kryo.Kryo;
 
 /**
 * Registrar for registering classes and serializers with Kryo
@@ -163,7 +164,7 @@ public class KylinKryoRegistrator implements KryoRegistrator {
         kyroClasses.add(org.apache.kylin.dict.DictionaryInfo.class);
         kyroClasses.add(org.apache.kylin.dict.NumberDictionary.class);
         kyroClasses.add(org.apache.kylin.dict.NumberDictionary2.class);
-        kyroClasses.add(org.apache.kylin.dict.NumberDictionaryForestBuilder.Number2BytesConverter.class);
+        kyroClasses.add(org.apache.kylin.dict.Number2BytesConverter.class);
         kyroClasses.add(org.apache.kylin.dict.StringBytesConverter.class);
         kyroClasses.add(org.apache.kylin.dict.TimeStrDictionary.class);
         kyroClasses.add(org.apache.kylin.dict.TrieDictionary.class);

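For context, a Spark KryoRegistrator registers classes up front so Kryo can serialize them by compact id rather than by full class name; the fix above simply points the registration at the relocated Number2BytesConverter. A stripped-down sketch of the interface being implemented:

    import com.esotericsoftware.kryo.Kryo;
    import org.apache.spark.serializer.KryoRegistrator;

    public class SketchKryoRegistrator implements KryoRegistrator {
        @Override
        public void registerClasses(Kryo kryo) {
            // registering here lets Kryo write small ids instead of class names
            kryo.register(org.apache.kylin.dict.Number2BytesConverter.class);
            kryo.register(org.apache.kylin.dict.StringBytesConverter.class);
        }
    }

Such a registrator takes effect when Spark is configured with spark.serializer=org.apache.spark.serializer.KryoSerializer and spark.kryo.registrator set to the registrator class.
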

[08/13] kylin git commit: KYLIN-1875 filter in model dimensions and measures

Posted by li...@apache.org.
KYLIN-1875 filter in model dimensions and measures

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/78a082f4
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/78a082f4
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/78a082f4

Branch: refs/heads/master-hbase0.98
Commit: 78a082f4ca9f587617152ef6b2935cb55c4618e0
Parents: 7ce3a15
Author: chenzhx <34...@qq.com>
Authored: Tue Feb 21 16:40:50 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 17:32:18 2017 +0800

----------------------------------------------------------------------
 webapp/app/js/filters/filter.js                       | 10 ++++++++++
 webapp/app/partials/cubeDesigner/dimensions.html      |  4 ++--
 webapp/app/partials/modelDesigner/data_model.html     | 14 ++++++--------
 .../app/partials/modelDesigner/model_dimensions.html  |  2 +-
 4 files changed, 19 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/78a082f4/webapp/app/js/filters/filter.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/filters/filter.js b/webapp/app/js/filters/filter.js
index 01c060a..78f6cf9 100755
--- a/webapp/app/js/filters/filter.js
+++ b/webapp/app/js/filters/filter.js
@@ -204,6 +204,16 @@ KylinApp
       });
       return out;
     }
+  }).filter('inMeaNotInDim', function ($filter) {
+        return function (inputArr, table, arr) {
+          var out=[];
+          angular.forEach(inputArr, function (inputItem) {
+            if (arr.indexOf(table+"."+inputItem.name) == -1) {
+              out.push(inputItem);
+            }
+          });
+          return out;
+        }
   }).filter('assignedMeasureNames', function ($filter) {
     //return the measures that haven't assign to column family
     return function (inputArr, assignedArr) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/78a082f4/webapp/app/partials/cubeDesigner/dimensions.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/cubeDesigner/dimensions.html b/webapp/app/partials/cubeDesigner/dimensions.html
index e1833d3..d7c2321 100644
--- a/webapp/app/partials/cubeDesigner/dimensions.html
+++ b/webapp/app/partials/cubeDesigner/dimensions.html
@@ -221,7 +221,7 @@
                                 <h4>{{table}}&nbsp;[FactTable]</h4>
                                 <table class="table table-striped table-hover ng-scope">
                                   <tr >
-                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">Select All</label></td>
+                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">&nbsp;Select All</label></td>
                                     <td class="col-xs-4"><label>Name</label></td>
                                     <td class="col-xs-3"><label>Columns</label></td>
                                     <td colspan="2" class="col-xs-3"></td>
@@ -247,7 +247,7 @@
                                 <h4>{{table}}&nbsp;[LookupTable]</h4>
                                 <table class="table table-striped table-hover ng-scope">
                                   <tr class="row" >
-                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">Select All</label></td>
+                                    <td class="col-xs-2"><label><input type="checkbox" ng-model="selectedColumns[table].all" ng-change="autoChangeAll(table)">&nbsp;Select All</label></td>
                                     <td class="col-xs-4"><label>Name</label></td>
                                     <td class="col-xs-3"><label>Columns</label></td>
                                     <td colspan="2" class="col-xs-3"></td>

http://git-wip-us.apache.org/repos/asf/kylin/blob/78a082f4/webapp/app/partials/modelDesigner/data_model.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/modelDesigner/data_model.html b/webapp/app/partials/modelDesigner/data_model.html
index b55935e..d6a6023 100644
--- a/webapp/app/partials/modelDesigner/data_model.html
+++ b/webapp/app/partials/modelDesigner/data_model.html
@@ -130,13 +130,12 @@
                     <ng-form name="lookup_form">
                       <div class="form-group">
                         <div class="row">
-                          <div class="col-xs-9">
+                          <div class="col-xs-12">
                             <div>
                               <select chosen ng-model="newLookup.joinTable" class="col-sm-4"
                                       ng-options="table as table for table in aliasName"
                                       name="table_name"  ng-disabled="lookupState.editing"
                                       ng-required="true"
-                                      data-placeholder="Join Table Name"
                                       class="chosen-select">
                                 <option value=""> &#45;&#45; From Table &#45;&#45; </option>
                               </select>
@@ -150,7 +149,6 @@
                                       ng-options="table.name as VdmUtil.removeNameSpace(table.name) for table in tableModel.selectProjectTables"
                                       name="table_name"  ng-disabled="lookupState.editing"
                                       ng-required="true"  ng-change="changeJoinTable()"
-                                      data-placeholder="Lookup Table Name"
                                       class="chosen-select">
                                 <option value=""> &#45;&#45; Select Lookup Table &#45;&#45; </option>
                               </select>
@@ -160,8 +158,8 @@
                       </div>
                     <div class="form-group">
                         <div class="row">
-                            <label class="col-sm-3 control-label font-color-default"><b>Alias</b></label>
-                            <div class="col-sm-6">
+                            <label class="col-sm-6 control-label font-color-default" ><b style="float:right">Table Alias :</b></label>
+                            <div class="col-sm-5">
                               <input type="text" class="form-control " name="joinTable_alias" placeholder="Input Table Alias" ng-required="true"
                                      ng-model="newLookup.alias"  ng-pattern="/^[A-Z_\d]+$/">
                               <small class="help-block red" ng-show="!lookup_form.joinTable_alias.$error.required&&lookup_form.joinTable_alias.$invalid && (lookup_form.joinTable_alias.$dirty||lookup_form.$submitted)"><i class="fa fa-exclamation-triangle"></i>&nbsp;&nbsp; Table alias is invalid(A Combination of numbers, uppercase letters or underscores).</small>
@@ -171,8 +169,8 @@
                     <!--Table Type-->
                     <div class="form-group">
                         <div class="row">
-                          <label class="col-sm-3"></label>
-                          <div class="col-sm-6">
+                          <label class="col-sm-6"></label>
+                          <div class="col-sm-5">
                             <label>
                               <input type="checkbox" ng-model="newLookup.kind" ng-true-value="FACT" ng-false-value="LOOKUP" >&nbsp;Is Limited
                             </label>
@@ -182,7 +180,7 @@
                     </div>
                     <div class="form-group">
                         <div class="row">
-                            <div class="col-xs-9">
+                            <div class="col-xs-12">
                                 <div ng-repeat="joinIndex in [] | range: newLookup.join.primary_key.length">
                                     <div>
                                         <select style="width: 45%" chosen data-placeholder="JoinTable Column"

http://git-wip-us.apache.org/repos/asf/kylin/blob/78a082f4/webapp/app/partials/modelDesigner/model_dimensions.html
----------------------------------------------------------------------
diff --git a/webapp/app/partials/modelDesigner/model_dimensions.html b/webapp/app/partials/modelDesigner/model_dimensions.html
index cfa422f..c246973 100644
--- a/webapp/app/partials/modelDesigner/model_dimensions.html
+++ b/webapp/app/partials/modelDesigner/model_dimensions.html
@@ -76,7 +76,7 @@
                         ng-model="modelsManager.selectedModel.dimensions[$index].columns" multiple>
                         <ui-select-match placeholder="Select Column...">{{$item.name}}</ui-select-match>
                         <ui-select-choices
-                          repeat="column.name as column in getColumnsByAlias(dimension.table) | filter:$select.search">
+                          repeat="column.name as column in getColumnsByAlias(dimension.table) | filter:$select.search|inMeaNotInDim:dimension.table:modelsManager.selectedModel.metrics">
                           {{column.name}}
                         </ui-select-choices>
                       </ui-select>


[02/13] kylin git commit: KYLIN-2384 Refactor and refine NumberDictionary.

Posted by li...@apache.org.
KYLIN-2384 Refactor and refine NumberDictionary.

Signed-off-by: Li Yang <li...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4dce0cf0
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4dce0cf0
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4dce0cf0

Branch: refs/heads/master-hbase0.98
Commit: 4dce0cf0806c29522aa2380296fa38e373028008
Parents: 65acd70
Author: xiefan46 <95...@qq.com>
Authored: Thu Feb 16 15:31:21 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 10:10:21 2017 +0800

----------------------------------------------------------------------
 .../kylin/dict/Number2BytesConverter.java       | 224 +++++++++++++++++++
 .../org/apache/kylin/dict/NumberDictionary.java | 163 +-------------
 .../apache/kylin/dict/NumberDictionary2.java    |  11 +-
 .../kylin/dict/NumberDictionaryBuilder.java     |   2 +-
 .../dict/NumberDictionaryForestBuilder.java     |  48 +---
 .../apache/kylin/dict/NumberDictionaryTest.java |   3 +-
 6 files changed, 232 insertions(+), 219 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java b/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
new file mode 100644
index 0000000..4e40527
--- /dev/null
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/Number2BytesConverter.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.dict;
+
+import org.apache.kylin.common.util.Bytes;
+
+import java.io.Serializable;
+
+/**
+ * Created by xiefan on 17-1-20.
+ */
+public class Number2BytesConverter implements BytesConverter<String>, Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final int MAX_DIGITS_BEFORE_DECIMAL_POINT_LEGACY = 16;
+
+    public static final int MAX_DIGITS_BEFORE_DECIMAL_POINT = 19;
+
+    int maxDigitsBeforeDecimalPoint;
+
+    static final transient ThreadLocal<NumberBytesCodec> LOCAL = new ThreadLocal<NumberBytesCodec>();
+
+    static NumberBytesCodec getCodec(int maxDigitsBeforeDecimalPoint) {
+        NumberBytesCodec codec = LOCAL.get();
+        if (codec == null) {
+            codec = new NumberBytesCodec(maxDigitsBeforeDecimalPoint);
+            LOCAL.set(codec);
+        }
+        return codec;
+    }
+
+    public Number2BytesConverter(){
+        this.maxDigitsBeforeDecimalPoint = MAX_DIGITS_BEFORE_DECIMAL_POINT;
+    }
+
+    public Number2BytesConverter(int maxDigitsBeforeDecimalPoint) {
+        this.maxDigitsBeforeDecimalPoint = maxDigitsBeforeDecimalPoint;
+    }
+
+    @Override
+    public byte[] convertToBytes(String v) {
+        NumberBytesCodec codec = getCodec(this.maxDigitsBeforeDecimalPoint);
+        byte[] num = Bytes.toBytes(v);
+        codec.encodeNumber(num, 0, num.length);
+        return Bytes.copy(codec.buf, codec.bufOffset, codec.bufLen);
+    }
+
+    @Override
+    public String convertFromBytes(byte[] b, int offset, int length) {
+        NumberBytesCodec codec = getCodec(this.maxDigitsBeforeDecimalPoint);
+        byte[] backup = codec.buf;
+        codec.buf = b;
+        codec.bufOffset = offset;
+        codec.bufLen = length;
+        int len = codec.decodeNumber(backup, 0);
+        codec.buf = backup;
+        return Bytes.toString(backup, 0, len);
+    }
+
+    // encode a number into an order preserving byte sequence
+    // for positives -- padding '0'
+    // for negatives -- '-' sign, padding '9', invert digits, and terminate by ';'
+    static class NumberBytesCodec {
+        int maxDigitsBeforeDecimalPoint;
+        byte[] buf;
+        int bufOffset;
+        int bufLen;
+
+        NumberBytesCodec(int maxDigitsBeforeDecimalPoint) {
+            this.maxDigitsBeforeDecimalPoint = maxDigitsBeforeDecimalPoint;
+            this.buf = new byte[maxDigitsBeforeDecimalPoint * 3];
+            this.bufOffset = 0;
+            this.bufLen = 0;
+        }
+
+        void encodeNumber(byte[] value, int offset, int len) {
+            if (len == 0) {
+                bufOffset = 0;
+                bufLen = 0;
+                return;
+            }
+
+
+            if (len > buf.length) {
+                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Internal buffer is only " + buf.length + " bytes");
+            }
+
+            boolean negative = value[offset] == '-';
+
+            // terminate negative ';'
+            int start = buf.length - len;
+            int end = buf.length;
+            if (negative) {
+                start--;
+                end--;
+                buf[end] = ';';
+            }
+
+            // copy & find decimal point
+            int decimalPoint = end;
+            for (int i = start, j = offset; i < end; i++, j++) {
+                buf[i] = value[j];
+                if (buf[i] == '.' && i < decimalPoint) {
+                    decimalPoint = i;
+                }
+            }
+            // remove '-' sign
+            if (negative) {
+                start++;
+            }
+
+            // prepend '0'
+            int nZeroPadding = maxDigitsBeforeDecimalPoint - (decimalPoint - start);
+            if (nZeroPadding < 0 || nZeroPadding + 1 > start)
+                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Expect " + maxDigitsBeforeDecimalPoint + " digits before decimal point at max.");
+            for (int i = 0; i < nZeroPadding; i++) {
+                buf[--start] = '0';
+            }
+
+            // consider negative
+            if (negative) {
+                buf[--start] = '-';
+                for (int i = start + 1; i < buf.length; i++) {
+                    int c = buf[i];
+                    if (c >= '0' && c <= '9') {
+                        buf[i] = (byte) ('9' - (c - '0'));
+                    }
+                }
+            } else {
+                buf[--start] = '0';
+            }
+
+            bufOffset = start;
+            bufLen = buf.length - start;
+
+            // remove 0 in tail after the decimal point
+            if (decimalPoint != end) {
+                if (negative == true) {
+                    while (buf[bufOffset + bufLen - 2] == '9' && (bufOffset + bufLen - 2 > decimalPoint)) {
+                        bufLen--;
+                    }
+
+                    if (bufOffset + bufLen - 2 == decimalPoint) {
+                        bufLen--;
+                    }
+
+                    buf[bufOffset + bufLen - 1] = ';';
+                } else {
+                    while (buf[bufOffset + bufLen - 1] == '0' && (bufOffset + bufLen - 1 > decimalPoint)) {
+                        bufLen--;
+                    }
+
+                    if (bufOffset + bufLen - 1 == decimalPoint) {
+                        bufLen--;
+                    }
+
+                }
+            }
+        }
+
+        int decodeNumber(byte[] returnValue, int offset) {
+            if (bufLen == 0) {
+                return 0;
+            }
+
+            int in = bufOffset;
+            int end = bufOffset + bufLen;
+            int out = offset;
+
+            // sign
+            boolean negative = buf[in] == '-';
+            if (negative) {
+                returnValue[out++] = '-';
+                in++;
+                end--;
+            }
+
+            // remove padding
+            byte padding = (byte) (negative ? '9' : '0');
+            for (; in < end; in++) {
+                if (buf[in] != padding)
+                    break;
+            }
+
+            // all paddings before '.', special case for '0'
+            if (in == end || !(buf[in] >= '0' && buf[in] <= '9')) {
+                returnValue[out++] = '0';
+            }
+
+            // copy the rest
+            if (negative) {
+                for (; in < end; in++, out++) {
+                    int c = buf[in];
+                    if (c >= '0' && c <= '9') {
+                        c = '9' - (c - '0');
+                    }
+                    returnValue[out] = (byte) c;
+                }
+            } else {
+                System.arraycopy(buf, in, returnValue, out, end - in);
+                out += end - in;
+            }
+
+            return out - offset;
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
index de28440..b987eda 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary.java
@@ -18,167 +18,15 @@
 
 package org.apache.kylin.dict;
 
-import org.apache.kylin.common.util.Bytes;
 
 /**
  * @author yangli9
  * 
  */
 @SuppressWarnings("serial")
+@Deprecated
 public class NumberDictionary<T> extends TrieDictionary<T> {
 
-    public static final int MAX_DIGITS_BEFORE_DECIMAL_POINT_LEGACY = 16;
-    public static final int MAX_DIGITS_BEFORE_DECIMAL_POINT = 19;
-
-    // encode a number into an order preserving byte sequence
-    // for positives -- padding '0'
-    // for negatives -- '-' sign, padding '9', invert digits, and terminate by ';'
-    static class NumberBytesCodec {
-        int maxDigitsBeforeDecimalPoint;
-        byte[] buf;
-        int bufOffset;
-        int bufLen;
-
-        NumberBytesCodec(int maxDigitsBeforeDecimalPoint) {
-            this.maxDigitsBeforeDecimalPoint = maxDigitsBeforeDecimalPoint;
-            this.buf = new byte[maxDigitsBeforeDecimalPoint * 3];
-            this.bufOffset = 0;
-            this.bufLen = 0;
-        }
-
-        void encodeNumber(byte[] value, int offset, int len) {
-            if (len == 0) {
-                bufOffset = 0;
-                bufLen = 0;
-                return;
-            }
-
-
-            if (len > buf.length) {
-                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Internal buffer is only " + buf.length + " bytes");
-            }
-
-            boolean negative = value[offset] == '-';
-
-            // terminate negative ';'
-            int start = buf.length - len;
-            int end = buf.length;
-            if (negative) {
-                start--;
-                end--;
-                buf[end] = ';';
-            }
-
-            // copy & find decimal point
-            int decimalPoint = end;
-            for (int i = start, j = offset; i < end; i++, j++) {
-                buf[i] = value[j];
-                if (buf[i] == '.' && i < decimalPoint) {
-                    decimalPoint = i;
-                }
-            }
-            // remove '-' sign
-            if (negative) {
-                start++;
-            }
-
-            // prepend '0'
-            int nZeroPadding = maxDigitsBeforeDecimalPoint - (decimalPoint - start);
-            if (nZeroPadding < 0 || nZeroPadding + 1 > start)
-                throw new IllegalArgumentException("Too many digits for NumberDictionary: " + Bytes.toString(value, offset, len) + ". Expect " + maxDigitsBeforeDecimalPoint + " digits before decimal point at max.");
-            for (int i = 0; i < nZeroPadding; i++) {
-                buf[--start] = '0';
-            }
-
-            // consider negative
-            if (negative) {
-                buf[--start] = '-';
-                for (int i = start + 1; i < buf.length; i++) {
-                    int c = buf[i];
-                    if (c >= '0' && c <= '9') {
-                        buf[i] = (byte) ('9' - (c - '0'));
-                    }
-                }
-            } else {
-                buf[--start] = '0';
-            }
-
-            bufOffset = start;
-            bufLen = buf.length - start;
-
-            // remove 0 in tail after the decimal point
-            if (decimalPoint != end) {
-                if (negative == true) {
-                    while (buf[bufOffset + bufLen - 2] == '9' && (bufOffset + bufLen - 2 > decimalPoint)) {
-                        bufLen--;
-                    }
-
-                    if (bufOffset + bufLen - 2 == decimalPoint) {
-                        bufLen--;
-                    }
-
-                    buf[bufOffset + bufLen - 1] = ';';
-                } else {
-                    while (buf[bufOffset + bufLen - 1] == '0' && (bufOffset + bufLen - 1 > decimalPoint)) {
-                        bufLen--;
-                    }
-
-                    if (bufOffset + bufLen - 1 == decimalPoint) {
-                        bufLen--;
-                    }
-
-                }
-            }
-        }
-
-        int decodeNumber(byte[] returnValue, int offset) {
-            if (bufLen == 0) {
-                return 0;
-            }
-
-            int in = bufOffset;
-            int end = bufOffset + bufLen;
-            int out = offset;
-
-            // sign
-            boolean negative = buf[in] == '-';
-            if (negative) {
-                returnValue[out++] = '-';
-                in++;
-                end--;
-            }
-
-            // remove padding
-            byte padding = (byte) (negative ? '9' : '0');
-            for (; in < end; in++) {
-                if (buf[in] != padding)
-                    break;
-            }
-
-            // all paddings before '.', special case for '0'
-            if (in == end || !(buf[in] >= '0' && buf[in] <= '9')) {
-                returnValue[out++] = '0';
-            }
-
-            // copy the rest
-            if (negative) {
-                for (; in < end; in++, out++) {
-                    int c = buf[in];
-                    if (c >= '0' && c <= '9') {
-                        c = '9' - (c - '0');
-                    }
-                    returnValue[out] = (byte) c;
-                }
-            } else {
-                System.arraycopy(buf, in, returnValue, out, end - in);
-                out += end - in;
-            }
-
-            return out - offset;
-        }
-    }
-
-    static transient ThreadLocal<NumberBytesCodec> localCodec = new ThreadLocal<NumberBytesCodec>();
 
     // ============================================================================
 
@@ -190,15 +38,6 @@ public class NumberDictionary<T> extends TrieDictionary<T> {
         super(trieBytes);
     }
 
-    protected NumberBytesCodec getCodec() {
-        NumberBytesCodec codec = localCodec.get();
-        if (codec == null) {
-            codec = new NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT_LEGACY);
-            localCodec.set(codec);
-        }
-        return codec;
-    }
-
     @Override
     protected boolean isNullObjectForm(T value) {
         return value == null || value.equals("");

http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary2.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary2.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary2.java
index 80e9940..3879d33 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary2.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionary2.java
@@ -22,10 +22,9 @@ package org.apache.kylin.dict;
  * This class uses MAX_DIGITS_BEFORE_DECIMAL_POINT (=19) instead of legacy (=16).
  */
 @SuppressWarnings("serial")
+@Deprecated
 public class NumberDictionary2<T> extends NumberDictionary<T> {
 
-    static transient ThreadLocal<NumberBytesCodec> localCodec = new ThreadLocal<NumberBytesCodec>();
-
     // ============================================================================
 
     public NumberDictionary2() { // default constructor for Writable interface
@@ -36,13 +35,5 @@ public class NumberDictionary2<T> extends NumberDictionary<T> {
         super(trieBytes);
     }
 
-    protected NumberBytesCodec getCodec() {
-        NumberBytesCodec codec = localCodec.get();
-        if (codec == null) {
-            codec = new NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
-            localCodec.set(codec);
-        }
-        return codec;
-    }
 
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
index 288e38f..26e4f89 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryBuilder.java
@@ -29,7 +29,7 @@ public class NumberDictionaryBuilder extends TrieDictionaryBuilder<String> {
 
 
     public NumberDictionaryBuilder() {
-        super(new NumberDictionaryForestBuilder.Number2BytesConverter());
+        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT));
     }
 
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
index b072599..8b7026d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
@@ -18,62 +18,20 @@
 
 package org.apache.kylin.dict;
 
-import java.io.Serializable;
-
-import org.apache.kylin.common.util.Bytes;
-import org.apache.kylin.dict.NumberDictionary.NumberBytesCodec;
-
 /**
  * Created by xiefan on 16-11-2.
  */
 public class NumberDictionaryForestBuilder extends TrieDictionaryForestBuilder<String> {
 
-    public static class Number2BytesConverter implements BytesConverter<String>, Serializable {
-        private static final long serialVersionUID = 1L;
-        
-        static final int MAX_DIGITS_BEFORE_DECIMAL_POINT = NumberDictionary.MAX_DIGITS_BEFORE_DECIMAL_POINT;
-        static final transient ThreadLocal<NumberBytesCodec> LOCAL = new ThreadLocal<NumberBytesCodec>();
-
-        static NumberBytesCodec getCodec() {
-            NumberBytesCodec codec = LOCAL.get();
-            if (codec == null) {
-                codec = new NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
-                LOCAL.set(codec);
-            }
-            return codec;
-        }
-        
-        @Override
-        public byte[] convertToBytes(String v) {
-            NumberBytesCodec codec = getCodec();
-            byte[] num = Bytes.toBytes(v);
-            codec.encodeNumber(num, 0, num.length);
-            return Bytes.copy(codec.buf, codec.bufOffset, codec.bufLen);
-        }
-
-        @Override
-        public String convertFromBytes(byte[] b, int offset, int length) {
-            NumberBytesCodec codec = getCodec();
-            byte[] backup = codec.buf;
-            codec.buf = b;
-            codec.bufOffset = offset;
-            codec.bufLen = length;
-            int len = codec.decodeNumber(backup, 0);
-            codec.buf = backup;
-            return Bytes.toString(backup, 0, len);
-        }
-
-    }
-
     public NumberDictionaryForestBuilder() {
-        super(new Number2BytesConverter());
+        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT));
     }
 
     public NumberDictionaryForestBuilder(int baseId) {
-        super(new Number2BytesConverter(), 0);
+        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0);
     }
 
     public NumberDictionaryForestBuilder(int baseId, int maxTrieSizeMB) {
-        super(new Number2BytesConverter(), 0, maxTrieSizeMB);
+        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0, maxTrieSizeMB);
     }
 }
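
With the converter extracted from NumberDictionaryForestBuilder, the trie
builder and the forest builder now share one Number2BytesConverter whose
digit budget is an explicit constructor argument rather than a subclass
override. A minimal sketch of driving it directly (constructor as shown at
the call sites above; convert methods as in the BytesConverter interface
the removed inner class implemented):

    Number2BytesConverter conv =
            new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT);
    byte[] key = conv.convertToBytes("-12.5");                // order-preserving bytes
    String back = conv.convertFromBytes(key, 0, key.length);  // "-12.5"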

http://git-wip-us.apache.org/repos/asf/kylin/blob/4dce0cf0/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
index 36eedf5..8da7208 100644
--- a/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
+++ b/core-dictionary/src/test/java/org/apache/kylin/dict/NumberDictionaryTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.dict;
 
+import static org.apache.kylin.dict.Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
@@ -44,7 +45,7 @@ import com.google.common.collect.Sets;
  */
 public class NumberDictionaryTest extends LocalFileMetadataTestCase {
 
-    NumberDictionary.NumberBytesCodec codec = new NumberDictionary.NumberBytesCodec(NumberDictionary.MAX_DIGITS_BEFORE_DECIMAL_POINT);
+    Number2BytesConverter.NumberBytesCodec codec = new Number2BytesConverter.NumberBytesCodec(MAX_DIGITS_BEFORE_DECIMAL_POINT);
     Random rand = new Random();
 
     @Before


[10/13] kylin git commit: minor, getConfigAsString reads .override properties

Posted by li...@apache.org.
minor, getConfigAsString reads .override properties

Signed-off-by: Hongbin Ma <ma...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/661f016b
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/661f016b
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/661f016b

Branch: refs/heads/master-hbase0.98
Commit: 661f016bbf8eae6d3ed8efb8ab1672ac212c62b4
Parents: 4c6fd2c
Author: Roger Shi <ro...@hotmail.com>
Authored: Tue Feb 21 18:38:51 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Feb 21 18:44:29 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/common/KylinConfig.java    | 70 +++++++++++---------
 .../kylin/common/util/OrderedProperties.java    |  6 ++
 2 files changed, 46 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/661f016b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index e8127ec..4fcc61f 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -20,6 +20,7 @@ package org.apache.kylin.common;
 
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -44,7 +45,6 @@ public class KylinConfig extends KylinConfigBase {
     private static final long serialVersionUID = 1L;
     private static final Logger logger = LoggerFactory.getLogger(KylinConfig.class);
 
-
     /** Kylin properties file name */
     public static final String KYLIN_CONF_PROPERTIES_FILE = "kylin.properties";
     public static final String KYLIN_CONF = "KYLIN_CONF";
@@ -182,11 +182,11 @@ public class KylinConfig extends KylinConfigBase {
         props.load(new StringReader(propsInStr));
         return createKylinConfig(props);
     }
-    
+
     public static KylinConfig createKylinConfig(KylinConfig another) {
         return createKylinConfig(another.getAllProperties());
     }
-    
+
     public static KylinConfig createKylinConfig(Properties prop) {
         KylinConfig kylinConfig = new KylinConfig();
         kylinConfig.reloadKylinConfig(prop);
@@ -225,31 +225,58 @@ public class KylinConfig extends KylinConfigBase {
     }
 
     public static Properties getKylinProperties() {
+        Properties conf = new Properties();
+        try {
+            OrderedProperties orderedProperties = getKylinOrderedProperties();
+            for (Map.Entry<String, String> each: orderedProperties.entrySet()) {
+                conf.put(each.getKey(), each.getValue());
+            }
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException(e);
+        }
+
+        return conf;
+    }
+
+    public static OrderedProperties getKylinOrderedProperties() throws FileNotFoundException {
         File propFile = getKylinPropertiesFile();
         if (propFile == null || !propFile.exists()) {
             logger.error("fail to locate " + KYLIN_CONF_PROPERTIES_FILE);
             throw new RuntimeException("fail to locate " + KYLIN_CONF_PROPERTIES_FILE);
         }
-        Properties conf = new Properties();
+
+        final InputStream is = new FileInputStream(propFile);
         try {
-            FileInputStream is = new FileInputStream(propFile);
-            conf.load(is);
-            IOUtils.closeQuietly(is);
-            conf = BCC.check(conf);
+            OrderedProperties orderedProperties = new OrderedProperties();
+            orderedProperties.load(is);
+            orderedProperties = BCC.check(orderedProperties);
 
             File propOverrideFile = new File(propFile.getParentFile(), propFile.getName() + ".override");
             if (propOverrideFile.exists()) {
                 FileInputStream ois = new FileInputStream(propOverrideFile);
-                Properties propOverride = new Properties();
-                propOverride.load(ois);
-                IOUtils.closeQuietly(ois);
-                conf.putAll(BCC.check(propOverride));
+                try {
+                    OrderedProperties propOverride = new OrderedProperties();
+                    propOverride.load(ois);
+                    orderedProperties.putAll(BCC.check(propOverride));
+                } finally {
+                    IOUtils.closeQuietly(ois);
+                }
             }
+            return orderedProperties;
         } catch (IOException e) {
             throw new RuntimeException(e);
+        } finally {
+            IOUtils.closeQuietly(is);
         }
+    }
 
-        return conf;
+    public static String getConfigAsString() throws IOException {
+        OrderedProperties orderedProperties = getKylinOrderedProperties();
+        final StringBuilder sb = new StringBuilder();
+        for (Map.Entry<String, String> entry : orderedProperties.entrySet()) {
+            sb.append(entry.getKey() + "=" + entry.getValue()).append('\n');
+        }
+        return sb.toString();
     }
 
     /**
@@ -301,23 +328,6 @@ public class KylinConfig extends KylinConfigBase {
         }
     }
 
-    public String getConfigAsString() throws IOException {
-        final File propertiesFile = getKylinPropertiesFile();
-        final InputStream is = new FileInputStream(propertiesFile);
-        try {
-            OrderedProperties orderedProperties = new OrderedProperties();
-            orderedProperties.load(is);
-            orderedProperties = BCC.check(orderedProperties);
-            final StringBuilder sb = new StringBuilder();
-            for (Map.Entry<String, String> entry : orderedProperties.entrySet()) {
-                sb.append(entry.getKey() + "=" + entry.getValue()).append('\n');
-            }
-            return sb.toString();
-        } finally {
-            IOUtils.closeQuietly(is);
-        }
-    }
-
     public KylinConfig base() {
         return this;
     }
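
The net effect of this change: kylin.properties and its optional sibling
kylin.properties.override are both read through OrderedProperties, the
override file is loaded second so its entries win, and getConfigAsString()
becomes a static dump of the merged, ordered result. A hypothetical
illustration (the property name and values are invented):

    // kylin.properties:           kylin.query.cache-enabled=true
    // kylin.properties.override:  kylin.query.cache-enabled=false
    OrderedProperties merged = KylinConfig.getKylinOrderedProperties();
    // merged yields kylin.query.cache-enabled=false; getConfigAsString()
    // prints every entry as key=value, one per line, in file order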

http://git-wip-us.apache.org/repos/asf/kylin/blob/661f016b/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java b/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
index 0e5a3f9..ffeced1 100644
--- a/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
+++ b/core-common/src/main/java/org/apache/kylin/common/util/OrderedProperties.java
@@ -183,6 +183,12 @@ public final class OrderedProperties implements Serializable {
         return new LinkedHashSet<Map.Entry<String, String>>(properties.entrySet());
     }
 
+    public void putAll(OrderedProperties others) {
+        for (Map.Entry<String, String> each : others.entrySet()) {
+            properties.put(each.getKey(), each.getValue());
+        }
+    }
+
     /**
      * See {@link Properties#load(InputStream)}.
      */


[11/13] kylin git commit: code refactor per KYLIN-2438

Posted by li...@apache.org.
code refactor per KYLIN-2438


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4bdb62cb
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4bdb62cb
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4bdb62cb

Branch: refs/heads/master-hbase0.98
Commit: 4bdb62cb3697e15d005a1a5383edf21f2a3ff567
Parents: 661f016
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon Feb 20 10:15:53 2017 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Tue Feb 21 20:58:31 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/common/KylinConfigBase.java    |   6 +-
 .../org/apache/kylin/common/QueryContext.java   |   2 +-
 .../cube/inmemcubing/ConcurrentDiskStore.java   |   7 +-
 .../cube/inmemcubing/InMemCubeBuilder.java      |   5 -
 .../kylin/cube/inmemcubing/MemDiskStore.java    |   6 -
 .../apache/kylin/gridtable/EmptyGTScanner.java  |   9 +-
 .../kylin/gridtable/GTAggregateScanner.java     |   7 +-
 .../apache/kylin/gridtable/GTFilterScanner.java |   5 -
 .../apache/kylin/gridtable/GTScanRequest.java   |   9 +-
 .../org/apache/kylin/gridtable/IGTScanner.java  |   4 -
 .../benchmark/SortedGTRecordGenerator.java      |   6 -
 .../gridtable/memstore/GTSimpleMemStore.java    |   5 -
 .../gridtable/AggregationCacheSpillTest.java    |  12 +-
 .../kylin/gridtable/SimpleGridTableTest.java    |   9 +-
 .../storage/gtrecord/CubeSegmentScanner.java    |   5 -
 .../kylin/storage/gtrecord/ScannerWorker.java   |  15 +-
 .../gtrecord/StorageResponseGTScatter.java      |   9 +-
 .../apache/kylin/query/ITFailfastQueryTest.java | 154 +++++++++++++++++++
 .../apache/kylin/query/ITKylinQueryTest.java    |  18 ---
 .../org/apache/kylin/query/KylinTestBase.java   |  15 ++
 .../apache/kylin/rest/service/QueryService.java |  12 +-
 .../hbase/cube/HBaseScannerBenchmark.java       |   3 -
 .../storage/hbase/cube/SimpleHBaseStore.java    |   5 -
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |  22 ++-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |  33 +---
 .../hbase/cube/v2/HBaseReadonlyStore.java       |   5 -
 .../coprocessor/endpoint/CubeVisitService.java  |   2 +-
 .../endpoint/protobuf/CubeVisit.proto           |   2 +-
 .../hbase/steps/SandboxMetastoreCLI.java        |   1 -
 29 files changed, 210 insertions(+), 183 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index f7d8452..13d967d 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -826,7 +826,7 @@ abstract public class KylinConfigBase implements Serializable {
     }
 
     public int getLargeQueryThreshold() {
-        return Integer.parseInt(getOptional("kylin.query.large-query-threshold", String.valueOf((int) (getScanThreshold() * 0.1))));
+        return Integer.parseInt(getOptional("kylin.query.large-query-threshold", String.valueOf(1000000)));
     }
 
     public int getDerivedInThreshold() {
@@ -865,6 +865,10 @@ abstract public class KylinConfigBase implements Serializable {
         return Long.parseLong(this.getOptional("kylin.query.cache-threshold-scan-count", String.valueOf(10 * 1024)));
     }
 
+    public long getQueryScanBytesCacheThreshold() {
+        return Long.parseLong(this.getOptional("kylin.query.cache-threshold-scan-bytes", String.valueOf(1024 * 1024)));
+    }
+
     public boolean isQuerySecureEnabled() {
         return Boolean.parseBoolean(this.getOptional("kylin.query.security-enabled", "true"));
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/QueryContext.java b/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
index 67925b6..5457aa5 100644
--- a/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
+++ b/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
@@ -49,7 +49,7 @@ public class QueryContext {
     }
 
     public String getQueryId() {
-        return queryId;
+        return queryId == null ? "" : queryId;
     }
 
     public void setQueryId(String queryId) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
index 5219ede..41d2dfb 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/ConcurrentDiskStore.java
@@ -265,11 +265,6 @@ public class ConcurrentDiskStore implements IGTStore, Closeable {
             return info;
         }
 
-        @Override
-        public long getScannedRowCount() {
-            return count;
-        }
-
     }
 
     private class Writer implements IGTWriter {
@@ -371,4 +366,4 @@ public class ConcurrentDiskStore implements IGTStore, Closeable {
         return "ConcurrentDiskStore@" + (info.getTableName() == null ? this.hashCode() : info.getTableName());
     }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
index a74f0c0..e08844e 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/InMemCubeBuilder.java
@@ -564,10 +564,5 @@ public class InMemCubeBuilder extends AbstractInMemCubeBuilder {
         public GTInfo getInfo() {
             return info;
         }
-
-        @Override
-        public long getScannedRowCount() {
-            return 0L;
-        }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
index 81403ab..a5471df 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/inmemcubing/MemDiskStore.java
@@ -277,12 +277,6 @@ public class MemDiskStore implements IGTStore, Closeable {
         public GTInfo getInfo() {
             return info;
         }
-
-        @Override
-        public long getScannedRowCount() {
-            return count;
-        }
-
     }
 
     private class Writer implements IGTWriter {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/EmptyGTScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/EmptyGTScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/EmptyGTScanner.java
index 01d31f0..8b6c995 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/EmptyGTScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/EmptyGTScanner.java
@@ -22,10 +22,8 @@ import java.io.IOException;
 import java.util.Iterator;
 
 public class EmptyGTScanner implements IGTScanner {
-    private long reportScannedRowCount;
 
-    public EmptyGTScanner(long reportScannedRowCount) {
-        this.reportScannedRowCount = reportScannedRowCount;
+    public EmptyGTScanner() {
     }
 
     @Override
@@ -34,11 +32,6 @@ public class EmptyGTScanner implements IGTScanner {
     }
 
     @Override
-    public long getScannedRowCount() {
-        return reportScannedRowCount;
-    }
-
-    @Override
     public void close() throws IOException {
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
index 8b0efcc..7cdd4f5 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTAggregateScanner.java
@@ -129,11 +129,6 @@ public class GTAggregateScanner implements IGTScanner {
     }
 
     @Override
-    public long getScannedRowCount() {
-        return inputScanner.getScannedRowCount();
-    }
-
-    @Override
     public void close() throws IOException {
         inputScanner.close();
         aggrCache.close();
@@ -598,4 +593,4 @@ public class GTAggregateScanner implements IGTScanner {
             }
         }
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
index f1f84af..717f89c 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTFilterScanner.java
@@ -63,11 +63,6 @@ public class GTFilterScanner implements IGTScanner {
     }
 
     @Override
-    public long getScannedRowCount() {
-        return inputScanner.getScannedRowCount();
-    }
-
-    @Override
     public void close() throws IOException {
         inputScanner.close();
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
index 651e5c4..4629c8e 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/GTScanRequest.java
@@ -42,7 +42,7 @@ import com.google.common.collect.Sets;
 public class GTScanRequest {
 
     private static final Logger logger = LoggerFactory.getLogger(GTScanRequest.class);
-    
+
     //it's not necessary to increase the checkInterval to a very large value because the check cost is not high
     //changing it might break org.apache.kylin.query.ITKylinQueryTest.testTimeoutQuery()
     public static final int terminateCheckInterval = 100;
@@ -175,8 +175,8 @@ public class GTScanRequest {
     public IGTScanner decorateScanner(IGTScanner scanner, boolean filterToggledOn, boolean aggrToggledOn, boolean hasPreFiltered, boolean spillEnabled) throws IOException {
         IGTScanner result = scanner;
         if (!filterToggledOn) { //Skip reading this section if you're not profiling! 
-            int scanned = lookAndForget(result);
-            return new EmptyGTScanner(scanned);
+            lookAndForget(result);
+            return new EmptyGTScanner();
         } else {
 
             if (this.hasFilterPushDown() && !hasPreFiltered) {
@@ -184,9 +184,8 @@ public class GTScanRequest {
             }
 
             if (!aggrToggledOn) {//Skip reading this section if you're not profiling! 
-                long scanned = result.getScannedRowCount();
                 lookAndForget(result);
-                return new EmptyGTScanner(scanned);
+                return new EmptyGTScanner();
             }
 
             if (!this.isAllowStorageAggregation()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/IGTScanner.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/IGTScanner.java b/core-cube/src/main/java/org/apache/kylin/gridtable/IGTScanner.java
index 980787b..96c7972 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/IGTScanner.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/IGTScanner.java
@@ -21,9 +21,5 @@ package org.apache.kylin.gridtable;
 import java.io.Closeable;
 
 public interface IGTScanner extends Iterable<GTRecord>, Closeable {
-
     GTInfo getInfo();
-
-    long getScannedRowCount();
-
 }
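
After this refactor IGTScanner is down to getInfo() plus the inherited
Iterable and Closeable contracts; implementations no longer carry per-scanner
row accounting. A minimal sketch of what an implementation looks like now
(hypothetical class; java.io and java.util imports assumed):

    class SingleRecordScanner implements IGTScanner {
        private final GTInfo info;
        private final GTRecord record;

        SingleRecordScanner(GTInfo info, GTRecord record) {
            this.info = info;
            this.record = record;
        }

        public GTInfo getInfo() { return info; }

        public Iterator<GTRecord> iterator() {
            // yield exactly one record
            return Collections.singleton(record).iterator();
        }

        public void close() throws IOException { /* nothing to release */ }
    }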

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/SortedGTRecordGenerator.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/SortedGTRecordGenerator.java b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/SortedGTRecordGenerator.java
index 9c839bb..71a0a21 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/SortedGTRecordGenerator.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/benchmark/SortedGTRecordGenerator.java
@@ -184,12 +184,6 @@ public class SortedGTRecordGenerator {
         public GTInfo getInfo() {
             return info;
         }
-
-        @Override
-        public long getScannedRowCount() {
-            return counter;
-        }
-
     }
 
     private class Distribution {

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/main/java/org/apache/kylin/gridtable/memstore/GTSimpleMemStore.java
----------------------------------------------------------------------
diff --git a/core-cube/src/main/java/org/apache/kylin/gridtable/memstore/GTSimpleMemStore.java b/core-cube/src/main/java/org/apache/kylin/gridtable/memstore/GTSimpleMemStore.java
index f9f370b..e1b5406 100644
--- a/core-cube/src/main/java/org/apache/kylin/gridtable/memstore/GTSimpleMemStore.java
+++ b/core-cube/src/main/java/org/apache/kylin/gridtable/memstore/GTSimpleMemStore.java
@@ -106,11 +106,6 @@ public class GTSimpleMemStore implements IGTStore {
             }
 
             @Override
-            public long getScannedRowCount() {
-                return count;
-            }
-
-            @Override
             public void close() throws IOException {
             }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
index 7abb069..8b2243c 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/AggregationCacheSpillTest.java
@@ -68,11 +68,6 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
             }
 
             @Override
-            public long getScannedRowCount() {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
             public void close() throws IOException {
             }
 
@@ -109,11 +104,6 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
             }
 
             @Override
-            public long getScannedRowCount() {
-                throw new UnsupportedOperationException();
-            }
-
-            @Override
             public void close() throws IOException {
             }
 
@@ -141,4 +131,4 @@ public class AggregationCacheSpillTest extends LocalFileMetadataTestCase {
         assertEquals(10, count);
         scanner.close();
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java b/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
index 4ac6644..14a25c5 100644
--- a/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/gridtable/SimpleGridTableTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.kylin.gridtable;
 
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -53,7 +52,6 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
 
         GTBuilder builder = rebuild(table);
         IGTScanner scanner = scan(table);
-        assertEquals(builder.getWrittenRowCount(), scanner.getScannedRowCount());
     }
 
     @Test
@@ -64,7 +62,6 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
 
         GTBuilder builder = rebuild(table);
         IGTScanner scanner = scan(table);
-        assertEquals(builder.getWrittenRowCount(), scanner.getScannedRowCount());
     }
 
     @Test
@@ -75,7 +72,6 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
 
         GTBuilder builder = rebuild(table);
         IGTScanner scanner = scanAndAggregate(table);
-        assertEquals(builder.getWrittenRowCount(), scanner.getScannedRowCount());
     }
 
     @Test
@@ -86,7 +82,6 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
 
         rebuildViaAppend(table);
         IGTScanner scanner = scan(table);
-        assertEquals(10, scanner.getScannedRowCount());
     }
 
     private IGTScanner scan(GridTable table) throws IOException {
@@ -101,12 +96,11 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
             System.out.println(r);
         }
         scanner.close();
-        System.out.println("Scanned Row Count: " + scanner.getScannedRowCount());
         return scanner;
     }
 
     private IGTScanner scanAndAggregate(GridTable table) throws IOException {
-        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0, 2)).setAggrMetrics(setOf(3, 4)).setAggrMetricsFuncs(new String[]{"count", "sum"}).setFilterPushDown(null).createGTScanRequest();
+        GTScanRequest req = new GTScanRequestBuilder().setInfo(table.getInfo()).setRanges(null).setDimensions(null).setAggrGroupBy(setOf(0, 2)).setAggrMetrics(setOf(3, 4)).setAggrMetricsFuncs(new String[] { "count", "sum" }).setFilterPushDown(null).createGTScanRequest();
         IGTScanner scanner = table.scan(req);
         int i = 0;
         for (GTRecord r : scanner) {
@@ -135,7 +129,6 @@ public class SimpleGridTableTest extends LocalFileMetadataTestCase {
             System.out.println(r);
         }
         scanner.close();
-        System.out.println("Scanned Row Count: " + scanner.getScannedRowCount());
         return scanner;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
index 029502c..4f206d4 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/CubeSegmentScanner.java
@@ -96,11 +96,6 @@ public class CubeSegmentScanner implements IGTScanner {
         return scanRequest == null ? null : scanRequest.getInfo();
     }
 
-    @Override
-    public long getScannedRowCount() {
-        return scanner.getScannedRowCount();
-    }
-
     public CubeSegment getSegment() {
         return this.cubeSeg;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
index fd50c54..9e89227 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/ScannerWorker.java
@@ -18,6 +18,10 @@
 
 package org.apache.kylin.storage.gtrecord;
 
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Iterator;
+
 import org.apache.kylin.cube.cuboid.Cuboid;
 import org.apache.kylin.gridtable.EmptyGTScanner;
 import org.apache.kylin.gridtable.GTInfo;
@@ -29,10 +33,6 @@ import org.apache.kylin.metadata.model.ISegment;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.util.Iterator;
-
 public class ScannerWorker {
 
     private static final Logger logger = LoggerFactory.getLogger(ScannerWorker.class);
@@ -41,7 +41,7 @@ public class ScannerWorker {
     public ScannerWorker(ISegment segment, Cuboid cuboid, GTScanRequest scanRequest, String gtStorage) {
         if (scanRequest == null) {
             logger.info("Segment {} will be skipped", segment);
-            internal = new EmptyGTScanner(0);
+            internal = new EmptyGTScanner();
             return;
         }
 
@@ -62,9 +62,4 @@ public class ScannerWorker {
     public void close() throws IOException {
         internal.close();
     }
-
-    public long getScannedRowCount() {
-        return internal.getScannedRowCount();
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
----------------------------------------------------------------------
diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
index dc8746f..3904b5c 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/StorageResponseGTScatter.java
@@ -45,15 +45,13 @@ public class StorageResponseGTScatter implements IGTScanner {
     private IPartitionStreamer partitionStreamer;
     private Iterator<byte[]> blocks;
     private ImmutableBitSet columns;
-    private long totalScannedCount;
     private int storagePushDownLimit = -1;
 
-    public StorageResponseGTScatter(GTInfo info, IPartitionStreamer partitionStreamer, ImmutableBitSet columns, long totalScannedCount, int storagePushDownLimit) {
+    public StorageResponseGTScatter(GTInfo info, IPartitionStreamer partitionStreamer, ImmutableBitSet columns, int storagePushDownLimit) {
         this.info = info;
         this.partitionStreamer = partitionStreamer;
         this.blocks = partitionStreamer.asByteArrayIterator();
         this.columns = columns;
-        this.totalScannedCount = totalScannedCount;
         this.storagePushDownLimit = storagePushDownLimit;
     }
 
@@ -63,11 +61,6 @@ public class StorageResponseGTScatter implements IGTScanner {
     }
 
     @Override
-    public long getScannedRowCount() {
-        return totalScannedCount;
-    }
-
-    @Override
     public void close() throws IOException {
         //If the upstream consumer fails while consuming the GTRecords, it should call IGTScanner's close method to release resources
         partitionStreamer.close();

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
new file mode 100644
index 0000000..a3720c8
--- /dev/null
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITFailfastQueryTest.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+package org.apache.kylin.query;
+
+import java.io.File;
+import java.util.Map;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.QueryContext;
+import org.apache.kylin.common.exceptions.ResourceLimitExceededException;
+import org.apache.kylin.metadata.realization.RealizationType;
+import org.apache.kylin.query.routing.Candidate;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import com.google.common.collect.Maps;
+
+public class ITFailfastQueryTest extends KylinTestBase {
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        printInfo("setUp in ITFailfastQueryTest");
+        Map<RealizationType, Integer> priorities = Maps.newHashMap();
+        priorities.put(RealizationType.HYBRID, 0);
+        priorities.put(RealizationType.CUBE, 0);
+        priorities.put(RealizationType.INVERTED_INDEX, 0);
+        Candidate.setPriorities(priorities);
+        joinType = "left";
+        setupAll();
+    }
+
+    @After
+    public void cleanUp() {
+        QueryContext.reset();
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        printInfo("tearDown in ITFailfastQueryTest");
+        Candidate.restorePriorities();
+        clean();
+    }
+
+    @Test
+    public void testPartitionExceedMaxScanBytes() throws Exception {
+        String key = "kylin.storage.partition.max-scan-bytes";
+        long saved = KylinConfig.getInstanceFromEnv().getPartitionMaxScanBytes();
+        KylinConfig.getInstanceFromEnv().setProperty(key, "18000");//very low threshold 
+
+        boolean meetExpectedException = false;
+        try {
+            String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";
+            File sqlFile = new File(queryFileName);
+            try {
+                runSQL(sqlFile, false, false);
+            } catch (Exception e) {
+                if (findRoot(e) instanceof ResourceLimitExceededException) {
+                    //expected
+                    meetExpectedException = true;
+                } else {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            if (!meetExpectedException) {
+                throw new RuntimeException("Did not meet expected exception");
+            }
+        } finally {
+            KylinConfig.getInstanceFromEnv().setProperty(key, String.valueOf(saved));
+        }
+    }
+
+    @Test
+    public void testPartitionNotExceedMaxScanBytes() throws Exception {
+        String key = "kylin.storage.partition.max-scan-bytes";
+        long saved = KylinConfig.getInstanceFromEnv().getPartitionMaxScanBytes();
+        KylinConfig.getInstanceFromEnv().setProperty(key, "20000");//enough threshold 
+
+        try {
+            String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";
+            File sqlFile = new File(queryFileName);
+            runSQL(sqlFile, false, false);
+        } finally {
+            KylinConfig.getInstanceFromEnv().setProperty(key, String.valueOf(saved));
+        }
+    }
+
+    @Test
+    public void testQueryExceedMaxScanBytes() throws Exception {
+        String key = "kylin.query.max-scan-bytes";
+        long saved = KylinConfig.getInstanceFromEnv().getQueryMaxScanBytes();
+        KylinConfig.getInstanceFromEnv().setProperty(key, "30000");//very low threshold 
+
+        boolean meetExpectedException = false;
+        try {
+            String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";
+            File sqlFile = new File(queryFileName);
+            try {
+                runSQL(sqlFile, false, false);
+            } catch (Exception e) {
+                if (findRoot(e) instanceof ResourceLimitExceededException) {
+                    //expected
+                    meetExpectedException = true;
+                } else {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            if (!meetExpectedException) {
+                throw new RuntimeException("Did not meet expected exception");
+            }
+        } finally {
+            KylinConfig.getInstanceFromEnv().setProperty(key, String.valueOf(saved));
+        }
+    }
+
+    @Test
+    public void testQueryNotExceedMaxScanBytes() throws Exception {
+        String key = "kylin.query.max-scan-bytes";
+        long saved = KylinConfig.getInstanceFromEnv().getQueryMaxScanBytes();
+        KylinConfig.getInstanceFromEnv().setProperty(key, "40000");//enough threshold 
+
+        try {
+            String queryFileName = getQueryFolderPrefix() + "src/test/resources/query/sql/query01.sql";
+            File sqlFile = new File(queryFileName);
+            runSQL(sqlFile, false, false);
+        } finally {
+            KylinConfig.getInstanceFromEnv().setProperty(key, String.valueOf(saved));
+        }
+    }
+
+}
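
All four tests above share one pattern: save the current limit, set an
artificially low (or comfortably high) value via KylinConfig.setProperty,
run query01.sql, assert that the root cause is (or is not) a
ResourceLimitExceededException, and restore the saved value in a finally
block. Condensed into one sketch (a paraphrase of the tests, not code from
the commit):

    long saved = KylinConfig.getInstanceFromEnv().getQueryMaxScanBytes();
    KylinConfig.getInstanceFromEnv().setProperty("kylin.query.max-scan-bytes", "30000");
    try {
        runSQL(sqlFile, false, false);   // expected to trip the byte limit
    } catch (Exception e) {
        assert findRoot(e) instanceof ResourceLimitExceededException;
    } finally {
        KylinConfig.getInstanceFromEnv().setProperty(
                "kylin.query.max-scan-bytes", String.valueOf(saved));
    }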

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 842ce40..1158704 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -74,21 +74,6 @@ public class ITKylinQueryTest extends KylinTestBase {
         clean();
     }
 
-    protected String getQueryFolderPrefix() {
-        return "";
-    }
-
-    protected Throwable findRoot(Throwable throwable) {
-        while (true) {
-            if (throwable.getCause() != null) {
-                throwable = throwable.getCause();
-            } else {
-                break;
-            }
-        }
-        return throwable;
-    }
-
     @Test
     public void testTimeoutQuery() throws Exception {
         try {
@@ -121,9 +106,6 @@ public class ITKylinQueryTest extends KylinTestBase {
             try {
                 runSQL(sqlFile, false, false);
             } catch (SQLException e) {
-
-                System.out.println(e.getMessage());
-
                 if (findRoot(e) instanceof KylinTimeoutException) {
                     //expected
                     continue;

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index 402aaa0..fd04b2f 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -692,4 +692,19 @@ public class KylinTestBase {
         return OLAPContext.getThreadLocalContexts().iterator().next();
     }
 
+    protected String getQueryFolderPrefix() {
+        return "";
+    }
+
+    protected Throwable findRoot(Throwable throwable) {
+        while (true) {
+            if (throwable.getCause() != null) {
+                throwable = throwable.getCause();
+            } else {
+                break;
+            }
+        }
+        return throwable;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 4c02aa4..122b823 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -361,14 +361,16 @@ public class QueryService extends BasicService {
                     sqlResponse = query(sqlRequest);
 
                     long durationThreshold = kylinConfig.getQueryDurationCacheThreshold();
-                    long scancountThreshold = kylinConfig.getQueryScanCountCacheThreshold();
+                    long scanCountThreshold = kylinConfig.getQueryScanCountCacheThreshold();
+                    long scanBytesThreshold = kylinConfig.getQueryScanBytesCacheThreshold();
                     sqlResponse.setDuration(System.currentTimeMillis() - startTime);
                     logger.info("Stats of SQL response: isException: {}, duration: {}, total scan count {}", //
                             String.valueOf(sqlResponse.getIsException()), String.valueOf(sqlResponse.getDuration()), String.valueOf(sqlResponse.getTotalScanCount()));
-                    if (checkCondition(queryCacheEnabled, "query cache is disabled") && //
-                            checkCondition(!sqlResponse.getIsException(), "query has exception") && //
-                            checkCondition(sqlResponse.getDuration() > durationThreshold || sqlResponse.getTotalScanCount() > scancountThreshold, "query is too lightweight with duration: {} ({}), scan count: {} ({})", sqlResponse.getDuration(), durationThreshold, sqlResponse.getTotalScanCount(), scancountThreshold) && // 
-                            checkCondition(sqlResponse.getResults().size() < kylinConfig.getLargeQueryThreshold(), "query response is too large: {} ({})", sqlResponse.getResults().size(), kylinConfig.getLargeQueryThreshold())) {
+                    if (checkCondition(queryCacheEnabled, "query cache is disabled") //
+                            && checkCondition(!sqlResponse.getIsException(), "query has exception") //
+                            && checkCondition(sqlResponse.getDuration() > durationThreshold || sqlResponse.getTotalScanCount() > scanCountThreshold || sqlResponse.getTotalScanBytes() > scanBytesThreshold, //
+                                    "query is too lightweight with duration: {} (threshold {}), scan count: {} (threshold {}), scan bytes: {} (threshold {})", sqlResponse.getDuration(), durationThreshold, sqlResponse.getTotalScanCount(), scanCountThreshold, sqlResponse.getTotalScanBytes(), scanBytesThreshold)
+                            && checkCondition(sqlResponse.getResults().size() < kylinConfig.getLargeQueryThreshold(), "query response is too large: {} ({})", sqlResponse.getResults().size(), kylinConfig.getLargeQueryThreshold())) {
                         cacheManager.getCache(SUCCESS_QUERY_CACHE).put(new Element(sqlRequest, sqlResponse));
                     }
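
With scan bytes added, the cache-admission rule reduces to a predicate over three "expensive enough" thresholds plus a result-size cap. A minimal sketch of the same logic as a pure method, assuming the getters shown in the diff and that the thresholds are read from KylinConfig exactly as above:

    // Sketch: a response is cached only if caching is on, the query succeeded,
    // it was expensive by duration OR scan count OR scan bytes, and the result
    // set is small enough to be worth keeping.
    private boolean shouldCache(SQLResponse r, long durationThreshold,
            long scanCountThreshold, long scanBytesThreshold, int largeQueryThreshold) {
        return queryCacheEnabled
                && !r.getIsException()
                && (r.getDuration() > durationThreshold
                        || r.getTotalScanCount() > scanCountThreshold
                        || r.getTotalScanBytes() > scanBytesThreshold)
                && r.getResults().size() < largeQueryThreshold;
    }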
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
index 3fdb92f..3eecba1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/HBaseScannerBenchmark.java
@@ -132,9 +132,6 @@ public class HBaseScannerBenchmark {
         }
         scan.close();
 
-        if (scan.getScannedRowCount() != count)
-            throw new IllegalStateException();
-
         t = System.currentTimeMillis() - t;
         logger.info(msg + ", " + count + " records, " + speed(t) + "K rec/sec");
     }
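
The assertion dropped here depended on IGTScanner.getScannedRowCount(), which this commit removes across the storage layer (see the SimpleHBaseStore, CubeHBaseScanRPC and HBaseReadonlyStore hunks below); scanned-row accounting now flows through QueryContext instead, as the CubeHBaseEndpointRPC hunk shows.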

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index f63d9c2..b12173d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -205,10 +205,5 @@ public class SimpleHBaseStore implements IGTStore {
         public GTInfo getInfo() {
             return info;
         }
-
-        @Override
-        public long getScannedRowCount() {
-            return count;
-        }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index abc3437..82b67b6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -23,7 +23,6 @@ import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.zip.DataFormatException;
 
@@ -146,8 +145,6 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
 
         logger.debug("Submitting rpc to {} shards starting from shard {}, scan range count {}", shardNum, cuboidBaseShard, rawScans.size());
 
-        final AtomicLong totalScannedCount = new AtomicLong(0);
-
         // KylinConfig: use env instance instead of CubeSegment, because KylinConfig will share among queries
         // for different cubes until redeployment of coprocessor jar.
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
@@ -205,7 +202,6 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
                                         Stats stats = result.getStats();
                                         queryContext.addAndGetScannedRows(stats.getScannedRowCount());
                                         queryContext.addAndGetScannedBytes(stats.getScannedBytes());
-                                        totalScannedCount.addAndGet(stats.getScannedRowCount());
 
                                         // if any other region has responded with error, skip further processing
                                         if (regionErrorHolder.get() != null) {
@@ -249,7 +245,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
             });
         }
 
-        return new StorageResponseGTScatter(fullGTInfo, new DummyPartitionStreamer(epResultItr), scanRequest.getColumns(), totalScannedCount.get(), scanRequest.getStoragePushDownLimit());
+        return new StorageResponseGTScatter(fullGTInfo, new DummyPartitionStreamer(epResultItr), scanRequest.getColumns(), scanRequest.getStoragePushDownLimit());
     }
 
     private ByteString serializeGTScanReq(GTScanRequest scanRequest) {
@@ -317,14 +313,14 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
         CubeVisitResponse.ErrorInfo errorInfo = response.getErrorInfo();
 
         switch (errorInfo.getType()) {
-            case UNKNOWN_TYPE:
-                return new RuntimeException("Coprocessor aborts: " + errorInfo.getMessage());
-            case TIMEOUT:
-                return new KylinTimeoutException(errorInfo.getMessage());
-            case RESOURCE_LIMIT_EXCEEDED:
-                return new ResourceLimitExceededException("Coprocessor resource limit exceeded: " + errorInfo.getMessage());
-            default:
-                throw new AssertionError("Unknown error type: " + errorInfo.getType());
+        case UNKNOWN_TYPE:
+            return new RuntimeException("Coprocessor aborts: " + errorInfo.getMessage());
+        case TIMEOUT:
+            return new KylinTimeoutException(errorInfo.getMessage());
+        case RESOURCE_LIMIT_EXCEEDED:
+            return new ResourceLimitExceededException("Coprocessor resource limit exceeded: " + errorInfo.getMessage());
+        default:
+            throw new AssertionError("Unknown error type: " + errorInfo.getType());
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index 1698180..33f8d90 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -93,31 +93,7 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
 
     @Override
     public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOException {
-        final IGTScanner scanner = getGTScannerInternal(scanRequest);
-
-        return new IGTScanner() {
-            @Override
-            public GTInfo getInfo() {
-                return scanner.getInfo();
-            }
-
-            @Override
-            public long getScannedRowCount() {
-                long sum = 0;
-                sum += scanner.getScannedRowCount();
-                return sum;
-            }
-
-            @Override
-            public void close() throws IOException {
-                scanner.close();
-            }
-
-            @Override
-            public Iterator<GTRecord> iterator() {
-                return scanner.iterator();
-            }
-        };
+        return getGTScannerInternal(scanRequest);
     }
 
     //for non-sharding cases it will only return one byte[] with not shard at beginning
@@ -229,11 +205,6 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
             }
 
             @Override
-            public long getScannedRowCount() {
-                return decorateScanner.getScannedRowCount();
-            }
-
-            @Override
             public void close() throws IOException {
                 decorateScanner.close();
             }
@@ -244,4 +215,4 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
             }
         };
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
index 4c02dff..631e8e8 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/HBaseReadonlyStore.java
@@ -152,11 +152,6 @@ public class HBaseReadonlyStore implements IGTStore {
             public GTInfo getInfo() {
                 return info;
             }
-
-            @Override
-            public long getScannedRowCount() {
-                return count;
-            }
         };
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index 61cf067..cde127e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -293,7 +293,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
             ResourceTrackingCellListIterator cellListIterator = new ResourceTrackingCellListIterator(
                     allCellLists,
                     scanReq.getStorageScanRowNumThreshold(), // for old client (scan threshold)
-                    request.hasMaxScanBytes() ? Long.MAX_VALUE : request.getMaxScanBytes(), // for new client
+                    !request.hasMaxScanBytes() ? Long.MAX_VALUE : request.getMaxScanBytes(), // for new client
                     scanReq.getTimeout());
 
             IGTStore store = new HBaseReadonlyStore(cellListIterator, scanReq, hbaseRawScans.get(0).hbaseColumns, hbaseColumnsToGT, request.getRowkeyPreambleSize(), behavior.delayToggledOn());
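
The one-character fix above corrects an inverted condition: previously a new client that did send maxScanBytes was handed an unlimited byte budget, while an old client that omitted the field got the proto default of 0 bytes. A sketch of the intended semantics, using the names from the diff:

    // Fall back to "no limit" only when the client did NOT send maxScanBytes;
    // otherwise enforce the client-supplied budget.
    long maxScanBytes = request.hasMaxScanBytes()
            ? request.getMaxScanBytes()       // new client: honor its limit
            : Long.MAX_VALUE;                 // old client: effectively unlimited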

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
index e01ff52..aa83595 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/protobuf/CubeVisit.proto
@@ -37,7 +37,7 @@ message CubeVisitRequest {
     required string kylinProperties = 5; // kylin properties
     optional string queryId = 6;
     optional bool spillEnabled = 7 [default = true];
-    optional int64 maxScanBytes = 8; // 0 means no limit
+    optional int64 maxScanBytes = 8; // must be positive
     message IntList {
         repeated int32 ints = 1;
     }
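
Since 0 no longer means "no limit", a new client must always send a positive budget. A minimal sketch of request construction, assuming the standard protobuf-generated builder for the message above and a byte budget taken from configuration:

    // Hypothetical client-side construction; setMaxScanBytes follows standard
    // protobuf builder naming for the field above.
    CubeVisitProtos.CubeVisitRequest request = CubeVisitProtos.CubeVisitRequest.newBuilder()
            // ... gtScanRequest, kylinProperties and the other required
            // fields elided ...
            .setMaxScanBytes(maxScanBytes)    // must be positive after this change
            .build();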

http://git-wip-us.apache.org/repos/asf/kylin/blob/4bdb62cb/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
index 691886b..62b154e 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
@@ -36,7 +36,6 @@ import org.slf4j.LoggerFactory;
  * It is designed to run in hadoop CLI, both in sandbox and in real hadoop environment
  */
 public class SandboxMetastoreCLI {
-
     private static final Logger logger = LoggerFactory.getLogger(SandboxMetastoreCLI.class);
 
     public static void main(String[] args) throws Exception {


[13/13] kylin git commit: KYLIN-2307 Create a branch for master with HBase 0.98 API

Posted by li...@apache.org.
KYLIN-2307 Create a branch for master with HBase 0.98 API


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/da9b080f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/da9b080f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/da9b080f

Branch: refs/heads/master-hbase0.98
Commit: da9b080f939b8e95a375adba3ae8014452901d78
Parents: 4bdb62c
Author: lidongsjtu <li...@apache.org>
Authored: Mon Jan 23 13:17:37 2017 +0800
Committer: root <ro...@sandbox.hortonworks.com>
Committed: Wed Feb 22 02:30:33 2017 +0000

----------------------------------------------------------------------
 dev-support/test_all_against_hdp_2_2_4_2_2.sh   |  25 ++++
 dev-support/test_all_against_hdp_2_4_0_0_169.sh |  25 ----
 .../sandbox/capacity-scheduler.xml              |  17 ++-
 examples/test_case_data/sandbox/core-site.xml   |  28 +---
 examples/test_case_data/sandbox/hbase-site.xml  | 119 +++++------------
 examples/test_case_data/sandbox/hdfs-site.xml   |  84 +++++-------
 examples/test_case_data/sandbox/hive-site.xml   |  89 +++++--------
 examples/test_case_data/sandbox/mapred-site.xml |  57 +++------
 examples/test_case_data/sandbox/yarn-site.xml   | 127 +++----------------
 .../kylin/provision/BuildCubeWithEngine.java    |  17 +--
 pom.xml                                         | 117 +----------------
 .../kylin/rest/security/AclHBaseStorage.java    |   4 +-
 .../rest/security/MockAclHBaseStorage.java      |   8 +-
 .../apache/kylin/rest/security/MockHTable.java  |  95 +++++++++++---
 .../rest/security/RealAclHBaseStorage.java      |   9 +-
 .../apache/kylin/rest/service/AclService.java   |  25 ++--
 .../apache/kylin/rest/service/CubeService.java  |  35 +++--
 .../apache/kylin/rest/service/QueryService.java |  24 ++--
 .../apache/kylin/rest/service/UserService.java  |  17 +--
 .../kylin/storage/hbase/HBaseConnection.java    |  44 +++----
 .../kylin/storage/hbase/HBaseResourceStore.java |  31 +++--
 .../storage/hbase/cube/SimpleHBaseStore.java    |  20 +--
 .../hbase/cube/v2/CubeHBaseEndpointRPC.java     |  13 +-
 .../storage/hbase/cube/v2/CubeHBaseScanRPC.java |   9 +-
 .../coprocessor/endpoint/CubeVisitService.java  |   4 +-
 .../storage/hbase/steps/CubeHTableUtil.java     |  16 +--
 .../storage/hbase/steps/DeprecatedGCStep.java   |  24 ++--
 .../storage/hbase/steps/HBaseCuboidWriter.java  |   7 +-
 .../kylin/storage/hbase/steps/MergeGCStep.java  |  23 ++--
 .../storage/hbase/util/CleanHtableCLI.java      |  12 +-
 .../storage/hbase/util/CubeMigrationCLI.java    |  37 +++---
 .../hbase/util/CubeMigrationCheckCLI.java       |  17 +--
 .../hbase/util/DeployCoprocessorCLI.java        |  27 ++--
 .../hbase/util/ExtendCubeToHybridCLI.java       |   8 +-
 .../hbase/util/GridTableHBaseBenchmark.java     |  34 ++---
 .../kylin/storage/hbase/util/HBaseClean.java    |  18 ++-
 .../hbase/util/HBaseRegionSizeCalculator.java   |  35 +++--
 .../kylin/storage/hbase/util/HBaseUsage.java    |   9 +-
 .../storage/hbase/util/HbaseStreamingInput.java |  30 ++---
 .../hbase/util/HtableAlterMetadataCLI.java      |   9 +-
 .../storage/hbase/util/OrphanHBaseCleanJob.java |  19 +--
 .../kylin/storage/hbase/util/PingHBaseCLI.java  |  15 +--
 .../kylin/storage/hbase/util/RowCounterCLI.java |  11 +-
 .../storage/hbase/util/StorageCleanupJob.java   |  20 ++-
 .../storage/hbase/util/UpdateHTableHostCLI.java |  17 +--
 tool/pom.xml                                    |  10 --
 .../org/apache/kylin/tool/CubeMigrationCLI.java |  19 +--
 .../kylin/tool/ExtendCubeToHybridCLI.java       |   8 +-
 48 files changed, 596 insertions(+), 872 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/dev-support/test_all_against_hdp_2_2_4_2_2.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_2_4_2_2.sh b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
new file mode 100755
index 0000000..f7780dd
--- /dev/null
+++ b/dev-support/test_all_against_hdp_2_2_4_2_2.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+dir=$(dirname ${0})
+cd ${dir}
+cd ..
+
+mvn clean install -DskipTests 2>&1 | tee mci.log
+mvn verify -Dhdp.version=${HDP_VERSION:-"2.2.4.2-2"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/dev-support/test_all_against_hdp_2_4_0_0_169.sh
----------------------------------------------------------------------
diff --git a/dev-support/test_all_against_hdp_2_4_0_0_169.sh b/dev-support/test_all_against_hdp_2_4_0_0_169.sh
deleted file mode 100755
index 2a3d24b..0000000
--- a/dev-support/test_all_against_hdp_2_4_0_0_169.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-dir=$(dirname ${0})
-cd ${dir}
-cd ..
-
-mvn clean install -DskipTests 2>&1 | tee mci.log
-mvn verify -Dhdp.version=${HDP_VERSION:-"2.4.0.0-169"} -fae 2>&1 | tee mvnverify.log

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/capacity-scheduler.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/capacity-scheduler.xml b/examples/test_case_data/sandbox/capacity-scheduler.xml
index e042aa5..7cb985c 100644
--- a/examples/test_case_data/sandbox/capacity-scheduler.xml
+++ b/examples/test_case_data/sandbox/capacity-scheduler.xml
@@ -47,6 +47,16 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
+        <name>yarn.scheduler.capacity.root.accessible-node-labels.default.maximum-capacity</name>
+        <value>-1</value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.acl_administer_queue</name>
         <value>*</value>
     </property>
@@ -57,6 +67,11 @@
     </property>
 
     <property>
+        <name>yarn.scheduler.capacity.root.default-node-label-expression</name>
+        <value></value>
+    </property>
+
+    <property>
         <name>yarn.scheduler.capacity.root.default.acl_administer_jobs</name>
         <value>*</value>
     </property>
@@ -96,4 +111,4 @@
         <value>default</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/core-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/core-site.xml b/examples/test_case_data/sandbox/core-site.xml
index a4ad5c6..0c5f62b 100644
--- a/examples/test_case_data/sandbox/core-site.xml
+++ b/examples/test_case_data/sandbox/core-site.xml
@@ -19,6 +19,7 @@
     <property>
         <name>fs.defaultFS</name>
         <value>hdfs://sandbox.hortonworks.com:8020</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -38,7 +39,7 @@
 
     <property>
         <name>hadoop.proxyuser.falcon.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -48,7 +49,7 @@
 
     <property>
         <name>hadoop.proxyuser.hbase.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
@@ -67,23 +68,13 @@
     </property>
 
     <property>
-        <name>hadoop.proxyuser.hdfs.groups</name>
-        <value>*</value>
-    </property>
-
-    <property>
-        <name>hadoop.proxyuser.hdfs.hosts</name>
-        <value>*</value>
-    </property>
-
-    <property>
         <name>hadoop.proxyuser.hive.groups</name>
-        <value>*</value>
+        <value>users</value>
     </property>
 
     <property>
         <name>hadoop.proxyuser.hive.hosts</name>
-        <value>sandbox.hortonworks.com</value>
+        <value>*</value>
     </property>
 
     <property>
@@ -132,15 +123,8 @@
     </property>
 
     <property>
-        <name>hadoop.security.key.provider.path</name>
-        <value></value>
-    </property>
-
-    <property>
         <name>io.compression.codecs</name>
-        <value>
-            org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec
-        </value>
+        <value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/hbase-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 568de2e..46d5345 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -22,33 +22,8 @@
     </property>
 
     <property>
-        <name>hbase.bucketcache.ioengine</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.percentage.in.combinedcache</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bucketcache.size</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.bulkload.staging.dir</name>
-        <value>/apps/hbase/staging</value>
-    </property>
-
-    <property>
         <name>hbase.client.keyvalue.maxsize</name>
-        <value>1048576</value>
-    </property>
-
-    <property>
-        <name>hbase.client.retries.number</name>
-        <value>35</value>
+        <value>10485760</value>
     </property>
 
     <property>
@@ -63,19 +38,12 @@
 
     <property>
         <name>hbase.coprocessor.master.classes</name>
-        <value>org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor</value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
         <name>hbase.coprocessor.region.classes</name>
-        <value>
-            org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
-        </value>
-    </property>
-
-    <property>
-        <name>hbase.coprocessor.regionserver.classes</name>
-        <value></value>
+        <value>com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor</value>
     </property>
 
     <property>
@@ -119,11 +87,6 @@
     </property>
 
     <property>
-        <name>hbase.hstore.compaction.max</name>
-        <value>10</value>
-    </property>
-
-    <property>
         <name>hbase.hstore.compactionThreshold</name>
         <value>3</value>
     </property>
@@ -140,42 +103,32 @@
 
     <property>
         <name>hbase.master.info.port</name>
-        <value>16010</value>
+        <value>60010</value>
     </property>
 
     <property>
         <name>hbase.master.port</name>
-        <value>16000</value>
+        <value>60000</value>
     </property>
 
     <property>
-        <name>hbase.region.server.rpc.scheduler.factory.class</name>
-        <value></value>
+        <name>hbase.regionserver.global.memstore.lowerLimit</name>
+        <value>0.38</value>
     </property>
 
     <property>
-        <name>hbase.regionserver.global.memstore.size</name>
+        <name>hbase.regionserver.global.memstore.upperLimit</name>
         <value>0.4</value>
     </property>
 
     <property>
         <name>hbase.regionserver.handler.count</name>
-        <value>30</value>
+        <value>60</value>
     </property>
 
     <property>
         <name>hbase.regionserver.info.port</name>
-        <value>16030</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.port</name>
-        <value>16020</value>
-    </property>
-
-    <property>
-        <name>hbase.regionserver.wal.codec</name>
-        <value>org.apache.hadoop.hbase.regionserver.wal.WALCellCodec</value>
+        <value>60030</value>
     </property>
 
     <property>
@@ -184,26 +137,11 @@
     </property>
 
     <property>
-        <name>hbase.rpc.controllerfactory.class</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>hbase.rpc.engine</name>
-        <value>org.apache.hadoop.hbase.ipc.SecureRpcEngine</value>
-    </property>
-
-    <property>
         <name>hbase.rpc.protection</name>
         <value>PRIVACY</value>
     </property>
 
     <property>
-        <name>hbase.rpc.timeout</name>
-        <value>90000</value>
-    </property>
-
-    <property>
         <name>hbase.security.authentication</name>
         <value>simple</value>
     </property>
@@ -220,7 +158,7 @@
 
     <property>
         <name>hbase.tmp.dir</name>
-        <value>/tmp/hbase-${user.name}</value>
+        <value>/hadoop/hbase</value>
     </property>
 
     <property>
@@ -240,27 +178,34 @@
 
     <property>
         <name>hfile.block.cache.size</name>
-        <value>0.4</value>
-    </property>
-
-    <property>
-        <name>phoenix.functions.allowUserDefinedFunctions</name>
-        <value></value>
-    </property>
-
-    <property>
-        <name>phoenix.query.timeoutMs</name>
-        <value>60000</value>
+        <value>0.40</value>
     </property>
 
     <property>
         <name>zookeeper.session.timeout</name>
-        <value>60000</value>
+        <value>30000</value>
     </property>
 
     <property>
         <name>zookeeper.znode.parent</name>
         <value>/hbase-unsecure</value>
     </property>
-
-</configuration>
\ No newline at end of file
+    <property>
+        <name>hbase.client.pause</name>
+        <value>100</value>
+        <description>General client pause value.  Used mostly as value to wait
+            before running a retry of a failed get, region lookup, etc.
+            See hbase.client.retries.number for a description of how we back off from
+            this initial pause amount and how this pause works w/ retries.</description>
+    </property>
+    <property>
+        <name>hbase.client.retries.number</name>
+        <value>5</value>
+        <description>Maximum retries.  Used as maximum for all retryable
+            operations such as the getting of a cell's value, starting a row update,
+            etc.  Retry interval is a rough function based on hbase.client.pause.  At
+            first we retry at this interval but then with backoff, we pretty quickly reach
+            retrying every ten seconds.  See HConstants#RETRY_BACKOFF for how the backoff
+            ramps up.  Change this setting and hbase.client.pause to suit your workload.</description>
+    </property>
+</configuration>
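
For a rough sense of what these two settings buy: assuming HConstants#RETRY_BACKOFF multipliers of {1, 2, 3, 5, 10, ...}, a pause of 100 ms with 5 retries waits about 100 + 200 + 300 + 500 + 1000 ms, i.e. roughly 2 seconds before a retryable client operation gives up, versus several minutes with the 35 retries removed earlier in this file.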

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hdfs-site.xml b/examples/test_case_data/sandbox/hdfs-site.xml
index 1d9040a..1175fff 100644
--- a/examples/test_case_data/sandbox/hdfs-site.xml
+++ b/examples/test_case_data/sandbox/hdfs-site.xml
@@ -18,7 +18,12 @@
 
     <property>
         <name>dfs.block.access.token.enable</name>
-        <value>true</value>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>dfs.block.size</name>
+        <value>34217472</value>
     </property>
 
     <property>
@@ -42,21 +47,11 @@
     </property>
 
     <property>
-        <name>dfs.client.retry.policy.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.cluster.administrators</name>
         <value>hdfs</value>
     </property>
 
     <property>
-        <name>dfs.content-summary.limit</name>
-        <value>5000</value>
-    </property>
-
-    <property>
         <name>dfs.datanode.address</name>
         <value>0.0.0.0:50010</value>
     </property>
@@ -69,6 +64,7 @@
     <property>
         <name>dfs.datanode.data.dir</name>
         <value>/hadoop/hdfs/data</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -84,6 +80,7 @@
     <property>
         <name>dfs.datanode.failed.volumes.tolerated</name>
         <value>0</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -107,18 +104,13 @@
     </property>
 
     <property>
-        <name>dfs.domain.socket.path</name>
-        <value>/var/lib/hadoop-hdfs/dn_socket</value>
-    </property>
-
-    <property>
-        <name>dfs.encrypt.data.transfer.cipher.suites</name>
-        <value>AES/CTR/NoPadding</value>
+        <name>dfs.datanode.max.xcievers</name>
+        <value>1024</value>
     </property>
 
     <property>
-        <name>dfs.encryption.key.provider.uri</name>
-        <value></value>
+        <name>dfs.domain.socket.path</name>
+        <value>/var/lib/hadoop-hdfs/dn_socket</value>
     </property>
 
     <property>
@@ -158,12 +150,7 @@
 
     <property>
         <name>dfs.namenode.accesstime.precision</name>
-        <value>0</value>
-    </property>
-
-    <property>
-        <name>dfs.namenode.audit.log.async</name>
-        <value>true</value>
+        <value>3600000</value>
     </property>
 
     <property>
@@ -197,11 +184,6 @@
     </property>
 
     <property>
-        <name>dfs.namenode.fslock.fair</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.handler.count</name>
         <value>100</value>
     </property>
@@ -209,6 +191,7 @@
     <property>
         <name>dfs.namenode.http-address</name>
         <value>sandbox.hortonworks.com:50070</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -217,13 +200,9 @@
     </property>
 
     <property>
-        <name>dfs.namenode.inode.attributes.provider.class</name>
-        <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.name.dir</name>
         <value>/hadoop/hdfs/namenode</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -232,13 +211,8 @@
     </property>
 
     <property>
-        <name>dfs.namenode.rpc-address</name>
-        <value>sandbox.hortonworks.com:8020</value>
-    </property>
-
-    <property>
         <name>dfs.namenode.safemode.threshold-pct</name>
-        <value>0.999</value>
+        <value>1.0f</value>
     </property>
 
     <property>
@@ -262,6 +236,16 @@
     </property>
 
     <property>
+        <name>dfs.nfs.exports.allowed.hosts</name>
+        <value>* rw</value>
+    </property>
+
+    <property>
+        <name>dfs.nfs3.dump.dir</name>
+        <value>/tmp/.hdfs-nfs</value>
+    </property>
+
+    <property>
         <name>dfs.permissions.enabled</name>
         <value>true</value>
     </property>
@@ -273,7 +257,7 @@
 
     <property>
         <name>dfs.replication</name>
-        <value>3</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -284,11 +268,13 @@
     <property>
         <name>dfs.support.append</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
         <name>dfs.webhdfs.enabled</name>
         <value>true</value>
+        <final>true</final>
     </property>
 
     <property>
@@ -296,14 +282,4 @@
         <value>022</value>
     </property>
 
-    <property>
-        <name>nfs.exports.allowed.hosts</name>
-        <value>* rw</value>
-    </property>
-
-    <property>
-        <name>nfs.file.dump.dir</name>
-        <value>/tmp/.hdfs-nfs</value>
-    </property>
-
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/hive-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hive-site.xml b/examples/test_case_data/sandbox/hive-site.xml
index a8c210e..1e78107 100644
--- a/examples/test_case_data/sandbox/hive-site.xml
+++ b/examples/test_case_data/sandbox/hive-site.xml
@@ -22,46 +22,11 @@
     </property>
 
     <property>
-        <name>atlas.cluster.name</name>
-        <value>Sandbox</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.maxThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.minThreads</name>
-        <value>1</value>
-    </property>
-
-    <property>
-        <name>atlas.hook.hive.synchronous</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>atlas.rest.address</name>
-        <value>http://sandbox.hortonworks.com:21000</value>
-    </property>
-
-    <property>
-        <name>datanucleus.autoCreateSchema</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>datanucleus.cache.level2.type</name>
         <value>none</value>
     </property>
 
     <property>
-        <name>datanucleus.fixedDatastore</name>
-        <value>true</value>
-    </property>
-
-    <property>
         <name>hive.auto.convert.join</name>
         <value>true</value>
     </property>
@@ -73,7 +38,7 @@
 
     <property>
         <name>hive.auto.convert.join.noconditionaltask.size</name>
-        <value>357913941</value>
+        <value>1000000000</value>
     </property>
 
     <property>
@@ -162,16 +127,6 @@
     </property>
 
     <property>
-        <name>hive.default.fileformat</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
-        <name>hive.default.fileformat.managed</name>
-        <value>TextFile</value>
-    </property>
-
-    <property>
         <name>hive.enforce.bucketing</name>
         <value>true</value>
     </property>
@@ -207,6 +162,11 @@
     </property>
 
     <property>
+        <name>hive.exec.failure.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.max.created.files</name>
         <value>100000</value>
     </property>
@@ -237,11 +197,6 @@
     </property>
 
     <property>
-        <name>hive.exec.orc.encoding.strategy</name>
-        <value>SPEED</value>
-    </property>
-
-    <property>
         <name>hive.exec.parallel</name>
         <value>false</value>
     </property>
@@ -252,6 +207,16 @@
     </property>
 
     <property>
+        <name>hive.exec.post.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
+        <name>hive.exec.pre.hooks</name>
+        <value>org.apache.hadoop.hive.ql.hooks.ATSHook</value>
+    </property>
+
+    <property>
         <name>hive.exec.reducers.bytes.per.reducer</name>
         <value>67108864</value>
     </property>
@@ -297,6 +262,11 @@
     </property>
 
     <property>
+        <name>hive.heapsize</name>
+        <value>250</value>
+    </property>
+
+    <property>
         <name>hive.limit.optimize.enable</name>
         <value>true</value>
     </property>
@@ -508,7 +478,7 @@
 
     <property>
         <name>hive.prewarm.numcontainers</name>
-        <value>3</value>
+        <value>10</value>
     </property>
 
     <property>
@@ -518,7 +488,7 @@
 
     <property>
         <name>hive.security.authorization.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -538,7 +508,7 @@
 
     <property>
         <name>hive.security.metastore.authorization.manager</name>
-        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+        <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider,org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
     </property>
 
     <property>
@@ -563,7 +533,12 @@
 
     <property>
         <name>hive.server2.enable.doAs</name>
-        <value>false</value>
+        <value>true</value>
+    </property>
+
+    <property>
+        <name>hive.server2.enable.impersonation</name>
+        <value>true</value>
     </property>
 
     <property>
@@ -573,7 +548,7 @@
 
     <property>
         <name>hive.server2.logging.operation.log.location</name>
-        <value>/tmp/hive/operation_logs</value>
+        <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
     </property>
 
     <property>
@@ -678,7 +653,7 @@
 
     <property>
         <name>hive.tez.container.size</name>
-        <value>1024</value>
+        <value>250</value>
     </property>
 
     <property>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index be470f9..e90f594 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>io.sort.mb</name>
-        <value>64</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -27,13 +27,13 @@
     </property>
 
     <property>
-        <name>mapred.job.map.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.map.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
-        <name>mapred.job.reduce.memory.mb</name>
-        <value>250</value>
+        <name>mapreduce.reduce.memory.mb</name>
+        <value>512</value>
     </property>
 
     <property>
@@ -48,9 +48,7 @@
 
     <property>
         <name>mapreduce.admin.user.env</name>
-        <value>
-            LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64
-        </value>
+        <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-amd64-64</value>
     </property>
 
     <property>
@@ -60,9 +58,7 @@
 
     <property>
         <name>mapreduce.application.classpath</name>
-        <value>
-            $PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:$PWD/mr-framework/hadoop/share/hadoop/tools/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure
-        </value>
+        <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
     </property>
 
     <property>
@@ -81,18 +77,14 @@
     </property>
 
     <property>
-        <name>mapreduce.job.counters.max</name>
-        <value>130</value>
-    </property>
-
-    <property>
         <name>mapreduce.job.emit-timeline-data</name>
         <value>false</value>
     </property>
 
+    <!-- The default value on HDP is 0.05; for test environments we need to be conservative with resources. -->
     <property>
         <name>mapreduce.job.reduce.slowstart.completedmaps</name>
-        <value>0.05</value>
+        <value>1</value>
     </property>
 
     <property>
@@ -116,28 +108,13 @@
     </property>
 
     <property>
-        <name>mapreduce.jobhistory.recovery.enable</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.class</name>
-        <value>org.apache.hadoop.mapreduce.v2.hs.HistoryServerLeveldbStateStoreService</value>
-    </property>
-
-    <property>
-        <name>mapreduce.jobhistory.recovery.store.leveldb.path</name>
-        <value>/hadoop/mapreduce/jhs</value>
-    </property>
-
-    <property>
         <name>mapreduce.jobhistory.webapp.address</name>
         <value>sandbox.hortonworks.com:19888</value>
     </property>
 
     <property>
         <name>mapreduce.map.java.opts</name>
-        <value>-Xmx1228m</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -147,7 +124,7 @@
 
     <property>
         <name>mapreduce.map.memory.mb</name>
-        <value>1536</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -182,7 +159,7 @@
 
     <property>
         <name>mapreduce.reduce.java.opts</name>
-        <value>-Xmx1638m</value>
+        <value>-Xmx200m</value>
     </property>
 
     <property>
@@ -192,7 +169,7 @@
 
     <property>
         <name>mapreduce.reduce.memory.mb</name>
-        <value>2048</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -242,7 +219,7 @@
 
     <property>
         <name>mapreduce.task.io.sort.mb</name>
-        <value>859</value>
+        <value>128</value>
     </property>
 
     <property>
@@ -257,7 +234,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.command-opts</name>
-        <value>-Xmx819m -Dhdp.version=${hdp.version}</value>
+        <value>-Xmx512m</value>
     </property>
 
     <property>
@@ -267,7 +244,7 @@
 
     <property>
         <name>yarn.app.mapreduce.am.resource.mb</name>
-        <value>1024</value>
+        <value>512</value>
     </property>
 
     <property>
@@ -275,4 +252,4 @@
         <value>/user</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/examples/test_case_data/sandbox/yarn-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/yarn-site.xml b/examples/test_case_data/sandbox/yarn-site.xml
index ebdf44a..8256158 100644
--- a/examples/test_case_data/sandbox/yarn-site.xml
+++ b/examples/test_case_data/sandbox/yarn-site.xml
@@ -18,7 +18,7 @@
 
     <property>
         <name>hadoop.registry.rm.enabled</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
@@ -28,29 +28,22 @@
 
     <property>
         <name>yarn.acl.enable</name>
-        <value>true</value>
+        <value>false</value>
     </property>
 
     <property>
         <name>yarn.admin.acl</name>
-        <value>*</value>
+        <value></value>
     </property>
 
     <property>
         <name>yarn.application.classpath</name>
-        <value>
-            $HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*
-        </value>
-    </property>
-
-    <property>
-        <name>yarn.authorization-provider</name>
-        <value>org.apache.ranger.authorization.yarn.authorizer.RangerYarnAuthorizer</value>
+        <value>$HADOOP_CONF_DIR,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*</value>
     </property>
 
     <property>
         <name>yarn.client.nodemanager-connect.max-wait-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -79,11 +72,6 @@
     </property>
 
     <property>
-        <name>yarn.node-labels.enabled</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.node-labels.fs-store.retry-policy-spec</name>
         <value>2000, 500</value>
     </property>
@@ -94,6 +82,11 @@
     </property>
 
     <property>
+        <name>yarn.node-labels.manager-class</name>
+        <value>org.apache.hadoop.yarn.server.resourcemanager.nodelabels.MemoryRMNodeLabelsManager</value>
+    </property>
+
+    <property>
         <name>yarn.nodemanager.address</name>
         <value>0.0.0.0:45454</value>
     </property>
@@ -105,7 +98,7 @@
 
     <property>
         <name>yarn.nodemanager.aux-services</name>
-        <value>mapreduce_shuffle,spark_shuffle</value>
+        <value>mapreduce_shuffle</value>
     </property>
 
     <property>
@@ -114,11 +107,6 @@
     </property>
 
     <property>
-        <name>yarn.nodemanager.aux-services.spark_shuffle.class</name>
-        <value>org.apache.spark.network.yarn.YarnShuffleService</value>
-    </property>
-
-    <property>
         <name>yarn.nodemanager.bind-host</name>
         <value>0.0.0.0</value>
     </property>
@@ -160,7 +148,7 @@
 
     <property>
         <name>yarn.nodemanager.health-checker.script.timeout-ms</name>
-        <value>120000</value>
+        <value>60000</value>
     </property>
 
     <property>
@@ -255,12 +243,12 @@
 
     <property>
         <name>yarn.nodemanager.resource.memory-mb</name>
-        <value>7168</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.nodemanager.resource.percentage-physical-cpu-limit</name>
-        <value>80</value>
+        <value>100</value>
     </property>
 
     <property>
@@ -349,11 +337,6 @@
     </property>
 
     <property>
-        <name>yarn.resourcemanager.scheduler.monitor.enable</name>
-        <value>false</value>
-    </property>
-
-    <property>
         <name>yarn.resourcemanager.state-store.max-completed-applications</name>
         <value>${yarn.resourcemanager.max-completed-applications}</value>
     </property>
@@ -385,7 +368,7 @@
 
     <property>
         <name>yarn.resourcemanager.webapp.https.address</name>
-        <value>sandbox.hortonworks.com:8090</value>
+        <value>localhost:8090</value>
     </property>
 
     <property>
@@ -425,7 +408,7 @@
 
     <property>
         <name>yarn.resourcemanager.zk-address</name>
-        <value>sandbox.hortonworks.com:2181</value>
+        <value>localhost:2181</value>
     </property>
 
     <property>
@@ -450,22 +433,12 @@
 
     <property>
         <name>yarn.scheduler.maximum-allocation-mb</name>
-        <value>7168</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.maximum-allocation-vcores</name>
-        <value>3</value>
+        <value>9216</value>
     </property>
 
     <property>
         <name>yarn.scheduler.minimum-allocation-mb</name>
-        <value>1024</value>
-    </property>
-
-    <property>
-        <name>yarn.scheduler.minimum-allocation-vcores</name>
-        <value>1</value>
+        <value>1536</value>
     </property>
 
     <property>
@@ -494,41 +467,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.entity-group-fs-store.active-dir</name>
-        <value>/ats/active/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds</name>
-        <value>3600</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.done-dir</name>
-        <value>/ats/done/</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes</name>
-        <value>org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.retain-seconds</name>
-        <value>604800</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.scan-interval-seconds</name>
-        <value>60</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.entity-group-fs-store.summary-store</name>
-        <value>org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.generic-application-history.store-class</name>
         <value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
     </property>
@@ -544,11 +482,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.leveldb-state-store.path</name>
-        <value>/hadoop/yarn/timeline</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.leveldb-timeline-store.path</name>
         <value>/hadoop/yarn/timeline</value>
     </property>
@@ -574,23 +507,8 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.plugin.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.recovery.enabled</name>
-        <value>true</value>
-    </property>
-
-    <property>
-        <name>yarn.timeline-service.state-store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.store-class</name>
-        <value>org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore</value>
+        <value>org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore</value>
     </property>
 
     <property>
@@ -604,11 +522,6 @@
     </property>
 
     <property>
-        <name>yarn.timeline-service.version</name>
-        <value>1.5</value>
-    </property>
-
-    <property>
         <name>yarn.timeline-service.webapp.address</name>
         <value>sandbox.hortonworks.com:8188</value>
     </property>
@@ -618,4 +531,4 @@
         <value>sandbox.hortonworks.com:8190</value>
     </property>
 
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 726d72f..d43bc1e 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -32,9 +32,11 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -58,7 +60,6 @@ import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.source.SourcePartition;
-import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
 import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
 import org.apache.kylin.tool.StorageCleanupJob;
@@ -95,10 +96,10 @@ public class BuildCubeWithEngine {
             logger.error("error", e);
             exitCode = 1;
         }
-
+        
         long millis = System.currentTimeMillis() - start;
         System.out.println("Time elapsed: " + (millis / 1000) + " sec - in " + BuildCubeWithEngine.class.getName());
-
+        
         System.exit(exitCode);
     }
 
@@ -358,10 +359,10 @@ public class BuildCubeWithEngine {
 
     @SuppressWarnings("unused")
     private void checkHFilesInHBase(CubeSegment segment) throws IOException {
-        try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
-            String tableName = segment.getStorageLocationIdentifier();
-
-            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+        Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
+        String tableName = segment.getStorageLocationIdentifier();
+        try (HTable table = new HTable(conf, tableName)) {
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
             Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
             long totalSize = 0;
             for (Long size : sizeMap.values()) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index bb7fde9..22e2eeb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,15 +46,15 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 
         <!-- Hadoop versions -->
-        <hadoop2.version>2.7.1</hadoop2.version>
-        <yarn.version>2.7.1</yarn.version>
+        <hadoop2.version>2.6.0</hadoop2.version>
+        <yarn.version>2.6.0</yarn.version>
 
         <!-- Hive versions -->
-        <hive.version>1.2.1</hive.version>
-        <hive-hcatalog.version>1.2.1</hive-hcatalog.version>
+        <hive.version>0.14.0</hive.version>
+        <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
 
         <!-- HBase versions -->
-        <hbase-hadoop2.version>1.1.1</hbase-hadoop2.version>
+        <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
 
         <!-- Kafka versions -->
         <kafka.version>0.10.1.0</kafka.version>
@@ -72,7 +72,7 @@
 
         <!-- Hadoop Common deps, keep compatible with hadoop2.version -->
         <zookeeper.version>3.4.6</zookeeper.version>
-        <curator.version>2.7.1</curator.version>
+        <curator.version>2.6.0</curator.version>
         <jsr305.version>3.0.1</jsr305.version>
         <guava.version>14.0</guava.version>
         <jsch.version>0.1.53</jsch.version>
@@ -844,11 +844,6 @@
             <id>conjars</id>
             <url>http://conjars.org/repo/</url>
         </repository>
-
-        <repository>
-            <id>cloudera</id>
-            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-        </repository>
     </repositories>
 
     <build>
@@ -1202,106 +1197,6 @@
             </build>
         </profile>
         <profile>
-            <id>cdh5.7</id>
-            <properties>
-                <hadoop2.version>2.6.0-cdh5.7.0</hadoop2.version>
-                <yarn.version>2.6.0-cdh5.7.0</yarn.version>
-                <hive.version>1.1.0-cdh5.7.0</hive.version>
-                <hive-hcatalog.version>1.1.0-cdh5.7.0</hive-hcatalog.version>
-                <hbase-hadoop2.version>1.2.0-cdh5.7.0</hbase-hadoop2.version>
-                <zookeeper.version>3.4.5-cdh5.7.0</zookeeper.version>
-            </properties>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-compiler-plugin</artifactId>
-                        <configuration>
-                            <fork>true</fork>
-                            <meminitial>1024m</meminitial>
-                            <maxmem>2048m</maxmem>
-                        </configuration>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-dependency-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <id>copy-jamm</id>
-                                <goals>
-                                    <goal>copy</goal>
-                                </goals>
-                                <phase>generate-test-resources</phase>
-                                <configuration>
-                                    <artifactItems>
-                                        <artifactItem>
-                                            <groupId>com.github.jbellis</groupId>
-                                            <artifactId>jamm</artifactId>
-                                            <outputDirectory>${project.build.testOutputDirectory}</outputDirectory>
-                                            <destFileName>jamm.jar</destFileName>
-                                        </artifactItem>
-                                    </artifactItems>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.jacoco</groupId>
-                        <artifactId>jacoco-maven-plugin</artifactId>
-                        <configuration>
-                            <append>true</append>
-                            <destFile>
-                                ${sonar.jacoco.reportPath}
-                            </destFile>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>pre-test</id>
-                                <goals>
-                                    <goal>prepare-agent</goal>
-                                </goals>
-                                <configuration>
-                                    <propertyName>surefireArgLine</propertyName>
-                                </configuration>
-                            </execution>
-                            <execution>
-                                <id>post-test</id>
-                                <phase>test</phase>
-                                <goals>
-                                    <goal>report</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-surefire-plugin</artifactId>
-                        <version>2.19.1</version>
-                        <configuration>
-                            <reportsDirectory>${project.basedir}/../target/surefire-reports</reportsDirectory>
-                            <excludes>
-                                <exclude>**/IT*.java</exclude>
-                            </excludes>
-                            <systemProperties>
-                                <property>
-                                    <name>buildCubeUsingProvidedData</name>
-                                    <value>false</value>
-                                </property>
-                                <property>
-                                    <name>log4j.configuration</name>
-                                    <value>file:${project.basedir}/../build/conf/kylin-tools-log4j.properties</value>
-                                </property>
-                            </systemProperties>
-                            <argLine>-javaagent:${project.build.testOutputDirectory}/jamm.jar ${argLine} ${surefireArgLine}</argLine>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-        <profile>
             <!-- This profile adds/overrides few features of the 'apache-release'
                  profile in the parent pom. -->
             <id>apache-release</id>

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 8095bf8..ea68855 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@ package org.apache.kylin.rest.security;
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 
 /**
  */
@@ -36,6 +36,6 @@ public interface AclHBaseStorage {
 
     String prepareHBaseTable(Class<?> clazz) throws IOException;
 
-    Table getTable(String tableName) throws IOException;
+    HTableInterface getTable(String tableName) throws IOException;
 
 }
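
With the interface back on HTableInterface, call sites take a fresh handle per operation and close it afterwards; the connection behind it stays shared. A hedged caller sketch (the row key is illustrative, and the class is assumed to sit in the same package as the interface above):

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.kylin.common.util.Bytes;

    class AclRowReader {

        private final AclHBaseStorage aclHBaseStorage;

        AclRowReader(AclHBaseStorage storage) {
            this.aclHBaseStorage = storage;
        }

        Result readRow(String tableName, String rowKey) throws IOException {
            HTableInterface htable = null;
            try {
                htable = aclHBaseStorage.getTable(tableName); // per-use handle
                return htable.get(new Get(Bytes.toBytes(rowKey)));
            } finally {
                IOUtils.closeQuietly(htable); // close the handle, not the connection
            }
        }
    }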

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index cc76b87..d9326f5 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     private static final String aclTableName = "MOCK-ACL-TABLE";
     private static final String userTableName = "MOCK-USER-TABLE";
 
-    private Table mockedAclTable;
-    private Table mockedUserTable;
+    private HTableInterface mockedAclTable;
+    private HTableInterface mockedUserTable;
     private RealAclHBaseStorage realAcl;
 
     public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (realAcl != null) {
             return realAcl.getTable(tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index 972eea9..d0aa0ed 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@ import com.google.protobuf.ServiceException;
  *     <li>remove some methods for loading data, checking values ...</li>
  * </ul>
  */
-public class MockHTable implements Table {
+public class MockHTable implements HTableInterface {
     private final String tableName;
     private final List<String> columnFamilies = new ArrayList<>();
 
@@ -114,6 +114,14 @@ public class MockHTable implements Table {
         this.columnFamilies.add(columnFamily);
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public byte[] getTableName() {
+        return tableName.getBytes();
+    }
+
     @Override
     public TableName getName() {
         return null;
@@ -192,8 +200,8 @@ public class MockHTable implements Table {
     }
 
     @Override
-    public boolean[] existsAll(List<Get> list) throws IOException {
-        return new boolean[0];
+    public Boolean[] exists(List<Get> gets) throws IOException {
+        return new Boolean[0];
     }
 
     /**
@@ -298,6 +306,15 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
+        // FIXME: implement
+        return null;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public ResultScanner getScanner(Scan scan) throws IOException {
         final List<Result> ret = new ArrayList<Result>();
         byte[] st = scan.getStartRow();
@@ -429,7 +446,7 @@ public class MockHTable implements Table {
              */
         }
         if (filter.hasFilterRow() && !filteredOnRowKey) {
-            filter.filterRow();
+            filter.filterRow(nkvs);
         }
         if (filter.filterRow() || filteredOnRowKey) {
             nkvs.clear();
@@ -518,11 +535,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -543,7 +555,7 @@ public class MockHTable implements Table {
                 continue;
             }
             for (KeyValue kv : delete.getFamilyMap().get(family)) {
-                if (kv.isDelete()) {
+                if (kv.isDeleteFamily()) {
                     data.get(row).get(kv.getFamily()).clear();
                 } else {
                     data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -580,11 +592,6 @@ public class MockHTable implements Table {
         return false;
     }
 
-    @Override
-    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
-        return false;
-    }
-
     /**
      * {@inheritDoc}
      */
@@ -598,7 +605,7 @@ public class MockHTable implements Table {
      */
     @Override
     public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
-        return incrementColumnValue(row, family, qualifier, amount, null);
+        return incrementColumnValue(row, family, qualifier, amount, true);
     }
 
     @Override
@@ -610,6 +617,37 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
+        if (check(row, family, qualifier, null)) {
+            Put put = new Put(row);
+            put.add(family, qualifier, Bytes.toBytes(amount));
+            put(put);
+            return amount;
+        }
+        long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
+        data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
+        return newValue;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public boolean isAutoFlush() {
+        return true;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void flushCommits() throws IOException {
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public void close() throws IOException {
     }
 
@@ -635,6 +673,29 @@ public class MockHTable implements Table {
      * {@inheritDoc}
      */
     @Override
+    public void setAutoFlush(boolean autoFlush) {
+        throw new NotImplementedException();
+
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
+        throw new NotImplementedException();
+
+    }
+
+    @Override
+    public void setAutoFlushTo(boolean autoFlush) {
+        throw new NotImplementedException();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
     public long getWriteBufferSize() {
         throw new NotImplementedException();
     }
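
Moving the mock from Table back to HTableInterface is what forces the changes above: 0.98 declares getTableName(), Boolean[] exists(List), getRowOrBefore(), flushCommits() and the setAutoFlush family, none of which exist on the 1.x Table. A hedged sketch of exercising the mock in a test; the constructor shape (table name plus column families) is an assumption, since it is not visible in this hunk:

    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.kylin.common.util.Bytes;

    public class MockHTableDemo {

        public static void main(String[] args) throws Exception {
            MockHTable table = new MockHTable("MOCK-ACL-TABLE", "i"); // assumed ctor
            Put put = new Put(Bytes.toBytes("row1"));
            put.add(Bytes.toBytes("i"), Bytes.toBytes("type"), Bytes.toBytes("cube"));
            table.put(put);
            table.flushCommits(); // a no-op in the mock, but mirrors real call sites

            Result r = table.get(new Get(Bytes.toBytes("row1")));
            System.out.println(Bytes.toString(
                    r.getValue(Bytes.toBytes("i"), Bytes.toBytes("type"))));
        }
    }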

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index d1a1384..1d520c4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,8 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.service.AclService;
 import org.apache.kylin.rest.service.QueryService;
@@ -59,11 +58,11 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
     }
 
     @Override
-    public Table getTable(String tableName) throws IOException {
+    public HTableInterface getTable(String tableName) throws IOException {
         if (StringUtils.equals(tableName, aclTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
         } else if (StringUtils.equals(tableName, userTableName)) {
-            return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            return HBaseConnection.get(hbaseUrl).getTable(userTableName);
         } else {
             throw new IllegalStateException("getTable failed" + tableName);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index 3e3efec..d693a67 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public class AclService implements MutableAclService {
     @Override
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
         List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -173,7 +173,7 @@ public class AclService implements MutableAclService {
     @Override
     public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
         Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
-        Table htable = null;
+        HTableInterface htable = null;
         Result result = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,16 +226,17 @@ public class AclService implements MutableAclService {
         Authentication auth = SecurityContextHolder.getContext().getAuthentication();
         PrincipalSid sid = new PrincipalSid(auth);
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
             Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+            put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
 
             htable.put(put);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " created successfully.");
         } catch (IOException e) {
@@ -249,7 +250,7 @@ public class AclService implements MutableAclService {
 
     @Override
     public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -265,6 +266,7 @@ public class AclService implements MutableAclService {
             }
 
             htable.delete(delete);
+            htable.flushCommits();
 
             logger.debug("ACL of " + objectIdentity + " deleted successfully.");
         } catch (IOException e) {
@@ -282,7 +284,7 @@ public class AclService implements MutableAclService {
             throw e;
         }
 
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(aclTableName);
 
@@ -293,16 +295,17 @@ public class AclService implements MutableAclService {
             Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
 
             if (null != acl.getParentAcl()) {
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
             }
 
             for (AccessControlEntry ace : acl.getEntries()) {
                 AceInfo aceInfo = new AceInfo(ace);
-                put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+                put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
             }
 
             if (!put.isEmpty()) {
                 htable.put(put);
+                htable.flushCommits();
 
                 logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
             }
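
The recurring rewrite in this file is Put.addColumn(...) back to the 0.98 Put.add(...), plus an explicit htable.flushCommits() after every put or delete: a 0.98 table handle may buffer mutations client-side, so the flush pushes the RPC out before the handle is closed. A reduced sketch of that write path; the family, qualifier and value literals are illustrative:

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.kylin.common.util.Bytes;
    import org.apache.kylin.storage.hbase.HBaseConnection;

    public class AclWriteSketch {

        // hbaseUrl must be "hbase" on this branch (see HBaseConnection below).
        public static void writeCell(String hbaseUrl, String tableName) throws Exception {
            HTableInterface htable = null;
            try {
                htable = HBaseConnection.get(hbaseUrl).getTable(tableName);
                Put put = new Put(Bytes.toBytes("acl-row"));
                put.add(Bytes.toBytes("i"), Bytes.toBytes("type"), Bytes.toBytes("CubeInstance"));
                htable.put(put);
                htable.flushCommits(); // send the buffered mutation to the region server
            } finally {
                IOUtils.closeQuietly(htable);
            }
        }
    }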

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index c8c87cb..d28c87c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -27,7 +27,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.WeakHashMap;
 
-import org.apache.hadoop.hbase.client.Connection;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
@@ -393,24 +395,33 @@ public class CubeService extends BasicService {
         if (htableInfoCache.containsKey(tableName)) {
             return htableInfoCache.get(tableName);
         }
-        Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
+
+        Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
+        HTable table = null;
         HBaseResponse hr = null;
         long tableSize = 0;
         int regionCount = 0;
 
-        HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
-        Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
+        try {
+            table = new HTable(hconf, tableName);
 
-        for (long s : sizeMap.values()) {
-            tableSize += s;
-        }
+            HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+            Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
 
-        regionCount = sizeMap.size();
+            for (long s : sizeMap.values()) {
+                tableSize += s;
+            }
+
+            regionCount = sizeMap.size();
+
+            // Set response.
+            hr = new HBaseResponse();
+            hr.setTableSize(tableSize);
+            hr.setRegionCount(regionCount);
+        } finally {
+            IOUtils.closeQuietly(table);
+        }
 
-        // Set response.
-        hr = new HBaseResponse();
-        hr.setTableSize(tableSize);
-        hr.setRegionCount(regionCount);
         htableInfoCache.put(tableName, hr);
 
         return hr;
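
The reshaped getHTableInfo keeps the same cache-then-probe flow, now with a per-use HTable closed in a finally block so a failed probe cannot leak the handle. The skeleton, reduced to the caching decision (Info stands in for Kylin's HBaseResponse, and probe() is a hypothetical placeholder for the HTable plus HBaseRegionSizeCalculator sequence in the diff):

    import java.io.IOException;
    import java.util.Map;
    import java.util.WeakHashMap;

    public class HTableInfoCache {

        static class Info { // stand-in for HBaseResponse; only the fields set above
            long tableSize;
            int regionCount;
        }

        // A WeakHashMap lets entries be dropped under memory pressure rather
        // than growing without bound as tables come and go.
        private final Map<String, Info> cache = new WeakHashMap<String, Info>();

        public Info getHTableInfo(String tableName) throws IOException {
            Info cached = cache.get(tableName);
            if (cached != null)
                return cached;

            Info info = probe(tableName);
            cache.put(tableName, info);
            return info;
        }

        private Info probe(String tableName) throws IOException {
            return new Info(); // hypothetical: open HTable, sum region sizes, close
        }
    }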

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 122b823..60fc751 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -49,11 +49,11 @@ import javax.sql.DataSource;
 import org.apache.calcite.avatica.ColumnMetaData.Rep;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.debug.BackdoorToggles;
@@ -164,13 +164,14 @@ public class QueryService extends BasicService {
         Query[] queryArray = new Query[queries.size()];
 
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -196,13 +197,14 @@ public class QueryService extends BasicService {
 
         Query[] queryArray = new Query[queries.size()];
         byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
             Put put = new Put(Bytes.toBytes(creator));
-            put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+            put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
 
             htable.put(put);
+            htable.flushCommits();
         } finally {
             IOUtils.closeQuietly(htable);
         }
@@ -214,12 +216,12 @@ public class QueryService extends BasicService {
         }
 
         List<Query> queries = new ArrayList<Query>();
-        Table htable = null;
+        HTableInterface htable = null;
         try {
-            org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
+            HConnection conn = HBaseConnection.get(hbaseUrl);
             HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
 
-            htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
+            htable = conn.getTable(userTableName);
             Get get = new Get(Bytes.toBytes(creator));
             get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
             Result result = htable.get(get);

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index ab54882..07c7c6f 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@ import javax.annotation.PostConstruct;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -144,16 +144,16 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void updateUser(UserDetails user) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Pair<byte[], byte[]> pair = userToHBaseRow(user);
             Put put = new Put(pair.getKey());
-
-            put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+            put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
 
             htable.put(put);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -163,13 +163,14 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public void deleteUser(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
             Delete delete = new Delete(Bytes.toBytes(username));
 
             htable.delete(delete);
+            htable.flushCommits();
         } catch (IOException e) {
             throw new RuntimeException(e.getMessage(), e);
         } finally {
@@ -184,7 +185,7 @@ public class UserService implements UserDetailsManager {
 
     @Override
     public boolean userExists(String username) {
-        Table htable = null;
+        HTableInterface htable = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);
 
@@ -215,7 +216,7 @@ public class UserService implements UserDetailsManager {
         s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
 
         List<UserDetails> all = new ArrayList<UserDetails>();
-        Table htable = null;
+        HTableInterface htable = null;
         ResultScanner scanner = null;
         try {
             htable = aclHBaseStorage.getTable(userTableName);

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index 73f31c5..2351412 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.kylin.common.KylinConfig;
@@ -63,7 +63,7 @@ public class HBaseConnection {
     private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
 
     private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
-    private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
+    private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
     private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
 
     private static ExecutorService coprocessorPool = null;
@@ -74,7 +74,7 @@ public class HBaseConnection {
             public void run() {
                 closeCoprocessorPool();
 
-                for (Connection conn : connPool.values()) {
+                for (HConnection conn : connPool.values()) {
                     try {
                         conn.close();
                     } catch (IOException e) {
@@ -143,7 +143,7 @@ public class HBaseConnection {
         // using a hbase:xxx URL is deprecated, instead hbase config is always loaded from hbase-site.xml in classpath
         if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
             throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-
+        
         Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
         addHBaseClusterNNHAConfiguration(conf);
 
@@ -207,9 +207,9 @@ public class HBaseConnection {
 
     // ============================================================================
 
-    // returned Connection can be shared by multiple threads and does not require close()
+    // returned HConnection can be shared by multiple threads and does not require close()
     @SuppressWarnings("resource")
-    public static Connection get(String url) {
+    public static HConnection get(String url) {
         // find configuration
         Configuration conf = configCache.get(url);
         if (conf == null) {
@@ -217,13 +217,13 @@ public class HBaseConnection {
             configCache.put(url, conf);
         }
 
-        Connection connection = connPool.get(url);
+        HConnection connection = connPool.get(url);
         try {
             while (true) {
                 // I don't use DCL since recreate a connection is not a big issue.
                 if (connection == null || connection.isClosed()) {
                     logger.info("connection is null or closed, creating a new one");
-                    connection = ConnectionFactory.createConnection(conf);
+                    connection = HConnectionManager.createConnection(conf);
                     connPool.put(url, connection);
                 }
 
@@ -242,8 +242,8 @@ public class HBaseConnection {
         return connection;
     }
 
-    public static boolean tableExists(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static boolean tableExists(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
         try {
             return hbase.tableExists(TableName.valueOf(tableName));
         } finally {
@@ -263,18 +263,18 @@ public class HBaseConnection {
         deleteTable(HBaseConnection.get(hbaseUrl), tableName);
     }
 
-    public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
-        Admin hbase = conn.getAdmin();
-        TableName tableName = TableName.valueOf(table);
+    public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
+
         try {
             if (tableExists(conn, table)) {
                 logger.debug("HTable '" + table + "' already exists");
-                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
+                Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
                 boolean wait = false;
                 for (String family : families) {
                     if (existingFamilies.contains(family) == false) {
                         logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
-                        hbase.addColumn(tableName, newFamilyDescriptor(family));
+                        hbase.addColumn(table, newFamilyDescriptor(family));
                         // addColumn() is async, is there a way to wait it finish?
                         wait = true;
                     }
@@ -327,8 +327,8 @@ public class HBaseConnection {
         return fd;
     }
 
-    public static void deleteTable(Connection conn, String tableName) throws IOException {
-        Admin hbase = conn.getAdmin();
+    public static void deleteTable(HConnection conn, String tableName) throws IOException {
+        HBaseAdmin hbase = new HBaseAdmin(conn);
 
         try {
             if (!tableExists(conn, tableName)) {
@@ -338,10 +338,10 @@ public class HBaseConnection {
 
             logger.debug("delete HTable '" + tableName + "'");
 
-            if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
-                hbase.disableTable(TableName.valueOf(tableName));
+            if (hbase.isTableEnabled(tableName)) {
+                hbase.disableTable(tableName);
             }
-            hbase.deleteTable(TableName.valueOf(tableName));
+            hbase.deleteTable(tableName);
 
             logger.debug("HTable '" + tableName + "' deleted");
         } finally {
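
Taken together, the class now pools one HConnection per URL and hands out cheap per-use handles. A hedged usage sketch of the contract stated in the comments above: the shared HConnection must not be closed by callers, while every HTableInterface taken from it should be:

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.hbase.client.HConnection;
    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.kylin.storage.hbase.HBaseConnection;

    public class PooledConnectionUsage {

        public static void touchTable(String tableName) throws Exception {
            HConnection conn = HBaseConnection.get("hbase"); // only accepted URL form here
            HBaseConnection.createHTableIfNeeded(conn, tableName, "f"); // idempotent
            HTableInterface table = null;
            try {
                table = conn.getTable(tableName);
                // ... reads and writes against the table ...
            } finally {
                IOUtils.closeQuietly(table); // close the handle, keep the shared conn
            }
        }
    }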

http://git-wip-us.apache.org/repos/asf/kylin/blob/da9b080f/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index 74ab017..a44de4f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -30,15 +30,14 @@ import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +68,7 @@ public class HBaseResourceStore extends ResourceStore {
     final String tableNameBase;
     final String hbaseUrl;
 
-    Connection getConnection() throws IOException {
+    HConnection getConnection() throws IOException {
         return HBaseConnection.get(hbaseUrl);
     }
 
@@ -121,7 +120,7 @@ public class HBaseResourceStore extends ResourceStore {
         byte[] endRow = Bytes.toBytes(lookForPrefix);
         endRow[endRow.length - 1]++;
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         Scan scan = new Scan(startRow, endRow);
         if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
             scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -238,12 +237,13 @@ public class HBaseResourceStore extends ResourceStore {
         IOUtils.copy(content, bout);
         bout.close();
 
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
 
             table.put(put);
+            table.flushCommits();
         } finally {
             IOUtils.closeQuietly(table);
         }
@@ -251,7 +251,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             byte[] row = Bytes.toBytes(resPath);
             byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -264,6 +264,8 @@ public class HBaseResourceStore extends ResourceStore {
                 throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
             }
 
+            table.flushCommits();
+
             return newTS;
         } finally {
             IOUtils.closeQuietly(table);
@@ -272,7 +274,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     @Override
     protected void deleteResourceImpl(String resPath) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             boolean hdfsResourceExist = false;
             Result result = internalGetFromHTable(table, resPath, true, false);
@@ -285,6 +287,7 @@ public class HBaseResourceStore extends ResourceStore {
 
             Delete del = new Delete(Bytes.toBytes(resPath));
             table.delete(del);
+            table.flushCommits();
 
             if (hdfsResourceExist) { // remove hdfs cell value
                 Path redirectPath = bigCellHDFSPath(resPath);
@@ -305,7 +308,7 @@ public class HBaseResourceStore extends ResourceStore {
     }
 
     private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
-        Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
+        HTableInterface table = getConnection().getTable(getAllInOneTableName());
         try {
             return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
         } finally {
@@ -314,7 +317,7 @@ public class HBaseResourceStore extends ResourceStore {
 
     }
 
-    private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+    private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
         byte[] rowkey = Bytes.toBytes(path);
 
         Get get = new Get(rowkey);
@@ -333,7 +336,7 @@ public class HBaseResourceStore extends ResourceStore {
         return exists ? result : null;
     }
 
-    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
+    private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
         Path redirectPath = bigCellHDFSPath(resPath);
         FileSystem fileSystem = HadoopUtil.getWorkingFileSystem();
 
@@ -358,7 +361,7 @@ public class HBaseResourceStore extends ResourceStore {
         return redirectPath;
     }
 
-    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
+    private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
         int kvSizeLimit = Integer.parseInt(getConnection().getConfiguration().get("hbase.client.keyvalue.maxsize", "10485760"));
         if (content.length > kvSizeLimit) {
             writeLargeCellToHdfs(resPath, content, table);
@@ -366,8 +369,8 @@ public class HBaseResourceStore extends ResourceStore {
         }
 
         Put put = new Put(row);
-        put.addColumn(B_FAMILY, B_COLUMN, content);
-        put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+        put.add(B_FAMILY, B_COLUMN, content);
+        put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
 
         return put;
     }
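
The heart of checkAndPutResourceImpl above is HBase's atomic compare-and-set: the put lands only if the timestamp column still holds the expected old value. A reduced sketch of that step; the family and qualifier literals stand in for the B_FAMILY / B_COLUMN / B_COLUMN_TS constants:

    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.kylin.common.util.Bytes;

    public class CasWriteSketch {

        static boolean casTimestamp(HTableInterface table, String resPath,
                long oldTS, long newTS, byte[] content) throws Exception {
            byte[] row = Bytes.toBytes(resPath);
            byte[] family = Bytes.toBytes("f");
            Put put = new Put(row);
            put.add(family, Bytes.toBytes("c"), content);
            put.add(family, Bytes.toBytes("t"), Bytes.toBytes(newTS));
            // a null expected value means the cell must not exist yet (oldTS == 0)
            byte[] expected = (oldTS == 0) ? null : Bytes.toBytes(oldTS);
            boolean ok = table.checkAndPut(row, family, Bytes.toBytes("t"), expected, put);
            if (ok)
                table.flushCommits(); // the patch above flushes defensively as well
            return ok;
        }
    }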


[05/13] kylin git commit: KYLIN-2384 keep old Number2BytesConverter for backward compatibility

Posted by li...@apache.org.
KYLIN-2384 keep old Number2BytesConverter for backward compatibility


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a0dba296
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a0dba296
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a0dba296

Branch: refs/heads/master-hbase0.98
Commit: a0dba2962e70449fc8dad8cc770361581f00ad0d
Parents: f2feae2
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 15:40:10 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 15:40:10 2017 +0800

----------------------------------------------------------------------
 .../dict/NumberDictionaryForestBuilder.java      | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/a0dba296/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
----------------------------------------------------------------------
diff --git a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
index 8b7026d..380cd1d 100644
--- a/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
+++ b/core-dictionary/src/main/java/org/apache/kylin/dict/NumberDictionaryForestBuilder.java
@@ -23,15 +23,28 @@ package org.apache.kylin.dict;
  */
 public class NumberDictionaryForestBuilder extends TrieDictionaryForestBuilder<String> {
 
+    // keep this class for backward compatibility
+    public static class Number2BytesConverter extends org.apache.kylin.dict.Number2BytesConverter {
+        private static final long serialVersionUID = 1L;
+
+        public Number2BytesConverter() {
+            super();
+        }
+
+        public Number2BytesConverter(int maxDigitsBeforeDecimalPoint) {
+            super(maxDigitsBeforeDecimalPoint);
+        }
+    }
+
     public NumberDictionaryForestBuilder() {
-        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT));
+        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT));
     }
 
     public NumberDictionaryForestBuilder(int baseId) {
-        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0);
+        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0);
     }
 
     public NumberDictionaryForestBuilder(int baseId, int maxTrieSizeMB) {
-        super(new Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0, maxTrieSizeMB);
+        super(new org.apache.kylin.dict.Number2BytesConverter(Number2BytesConverter.MAX_DIGITS_BEFORE_DECIMAL_POINT), 0, maxTrieSizeMB);
     }
 }
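
The point of the empty nested subclass: dictionaries serialized before this change recorded the converter under its old nested-class name, so keeping a subclass at that location (with a serialVersionUID) presumably lets existing metadata keep deserializing, while new code constructs the promoted top-level converter. A small illustration of the two names, assuming only that both classes are on the classpath:

    import org.apache.kylin.dict.NumberDictionaryForestBuilder;

    public class BackCompatNameDemo {

        public static void main(String[] args) {
            // the new, top-level converter used by the builder constructors
            System.out.println(org.apache.kylin.dict.Number2BytesConverter.class.getName());
            // the old serialized name still resolves to a compatible subtype
            System.out.println(NumberDictionaryForestBuilder.Number2BytesConverter.class.getName());
            // prints:
            //   org.apache.kylin.dict.Number2BytesConverter
            //   org.apache.kylin.dict.NumberDictionaryForestBuilder$Number2BytesConverter
        }
    }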


[04/13] kylin git commit: KYLIN-2456 fix select * from .. join ..

Posted by li...@apache.org.
KYLIN-2456 fix select * from .. join ..


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f2feae22
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f2feae22
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f2feae22

Branch: refs/heads/master-hbase0.98
Commit: f2feae22a6cb5940213d040b96181b89bc51c46c
Parents: e77a848
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 15:10:58 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 15:10:58 2017 +0800

----------------------------------------------------------------------
 .../calcite/sql2rel/SqlToRelConverter.java      | 56 +++++++++++---------
 .../apache/kylin/query/ITKylinQueryTest.java    |  1 +
 2 files changed, 33 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/f2feae22/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
----------------------------------------------------------------------
diff --git a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
index cc9c9a1..a7be94f 100644
--- a/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
+++ b/atopcalcite/src/main/java/org/apache/calcite/sql2rel/SqlToRelConverter.java
@@ -16,6 +16,25 @@
  */
 package org.apache.calcite.sql2rel;
 
+import static org.apache.calcite.sql.SqlUtil.stripAs;
+import static org.apache.calcite.util.Static.RESOURCE;
+
+import java.lang.reflect.Type;
+import java.math.BigDecimal;
+import java.util.AbstractList;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
 import org.apache.calcite.avatica.util.Spaces;
 import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.plan.Convention;
@@ -159,6 +178,7 @@ import org.apache.calcite.util.NumberUtil;
 import org.apache.calcite.util.Pair;
 import org.apache.calcite.util.Util;
 import org.apache.calcite.util.trace.CalciteTrace;
+import org.slf4j.Logger;
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
@@ -170,27 +190,6 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import org.slf4j.Logger;
-
-import java.lang.reflect.Type;
-import java.math.BigDecimal;
-import java.util.AbstractList;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Deque;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import static org.apache.calcite.sql.SqlUtil.stripAs;
-import static org.apache.calcite.util.Static.RESOURCE;
-
 /*
  * The code has synced with calcite. Hope one day, we could remove the hardcode override point.
  * OVERRIDE POINT:
@@ -602,7 +601,7 @@ public class SqlToRelConverter {
          *   LogicalSort (optional)
          *    |- LogicalProject
          *        |- LogicalFilter (optional)
-         *            |- OLAPTableScan
+         *            |- OLAPTableScan or LogicalJoin
          */
         LogicalProject rootPrj = null;
         LogicalSort rootSort = null;
@@ -617,8 +616,8 @@ public class SqlToRelConverter {
 
         RelNode input = rootPrj.getInput();
         if (!(//
-                input.getClass().getSimpleName().equals("OLAPTableScan")//
-                || (input.getClass().getSimpleName().equals("LogicalFilter") && input.getInput(0).getClass().getSimpleName().equals("OLAPTableScan"))//
+                isAmong(input, "OLAPTableScan", "LogicalJoin")//
+                || (isAmong(input, "LogicalFilter") && isAmong(input.getInput(0), "OLAPTableScan", "LogicalJoin"))//
              ))
             return root;
 
@@ -654,6 +653,15 @@ public class SqlToRelConverter {
         return root;
     }
 
+    private boolean isAmong(RelNode rel, String... names) {
+        String simpleName = rel.getClass().getSimpleName();
+        for (String n : names) {
+            if (simpleName.equals(n))
+                return true;
+        }
+        return false;
+    }
+
     private static boolean isStream(SqlNode query) {
         return query instanceof SqlSelect
                 && ((SqlSelect) query).isKeywordPresent(SqlSelectKeyword.STREAM);
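
The matching above is done on simple class names rather than with instanceof, presumably because this forked Calcite module cannot compile against Kylin's OLAPTableScan without a circular dependency. A standalone illustration of the same matcher, with RelNode stubbed out:

    public class IsAmongDemo {

        interface RelNode { }
        static class OLAPTableScan implements RelNode { }
        static class LogicalJoin implements RelNode { }

        // same shape as the isAmong() added in the patch above
        static boolean isAmong(RelNode rel, String... names) {
            String simpleName = rel.getClass().getSimpleName();
            for (String n : names) {
                if (simpleName.equals(n))
                    return true;
            }
            return false;
        }

        public static void main(String[] args) {
            System.out.println(isAmong(new OLAPTableScan(), "OLAPTableScan", "LogicalJoin")); // true
            System.out.println(isAmong(new LogicalJoin(), "OLAPTableScan"));                  // false
        }
    }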

http://git-wip-us.apache.org/repos/asf/kylin/blob/f2feae22/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index 4590e60..d30371a 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -396,6 +396,7 @@ public class ITKylinQueryTest extends KylinTestBase {
     public void testSelectStarColumnCount() throws Exception {
         execAndCompColumnCount("select * from test_kylin_fact limit 10", 11);
         execAndCompColumnCount("select * from test_kylin_fact", 11);
+        execAndCompColumnCount("select * from     test_kylin_fact left join edw.test_cal_dt on test_kylin_fact.cal_dt = edw.test_cal_dt.CAL_DT    limit 10", 13);
     }
 
     @Test


[06/13] kylin git commit: minor, remove jets3t dependency

Posted by li...@apache.org.
minor, remove jets3t dependency


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/36f716f3
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/36f716f3
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/36f716f3

Branch: refs/heads/master-hbase0.98
Commit: 36f716f3afd5ccca6639e58ec767b60773dd96a3
Parents: a0dba29
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 16:01:56 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 16:01:56 2017 +0800

----------------------------------------------------------------------
 pom.xml | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/36f716f3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a103ae0..bb7fde9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -346,6 +346,10 @@
                         <groupId>javax.servlet</groupId>
                         <artifactId>servlet-api</artifactId>
                     </exclusion>
+                    <exclusion>
+                        <groupId>net.java.dev.jets3t</groupId>
+                        <artifactId>jets3t</artifactId>
+                    </exclusion>
                 </exclusions>
             </dependency>
             <dependency>


[09/13] kylin git commit: KYLIN-2440 add test case

Posted by li...@apache.org.
KYLIN-2440 add test case


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/4c6fd2cc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/4c6fd2cc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/4c6fd2cc

Branch: refs/heads/master-hbase0.98
Commit: 4c6fd2cc5911a5221ba03b230cff65a706f9f9aa
Parents: 78a082f
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 18:18:00 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 18:18:00 2017 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/cube/CubeDescTest.java     | 31 ++++++++++++++++----
 1 file changed, 25 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/4c6fd2cc/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
index 20ee43e..8eb3bb0 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeDescTest.java
@@ -21,6 +21,7 @@ package org.apache.kylin.cube;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -48,7 +49,6 @@ import org.apache.kylin.metadata.model.TblColRef;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -103,7 +103,6 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
         this.cleanupTestMetadata();
     }
 
-    @Ignore ("To enable spark in IT, the inner cube removed the percentile measure, so ignore this test")
     @Test
     public void testCiCube() {
         CubeDescManager mgr = CubeDescManager.getInstance(getTestConfig());
@@ -122,10 +121,14 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
             assertArrayEquals(ld.getDerived(), id.getDerived());
         }
         
-        assertEquals(lc.getMeasures().size(), ic.getMeasures().size());
-        for (int i = 0, n = lc.getMeasures().size(); i < n; i++) {
-            MeasureDesc lm = lc.getMeasures().get(i);
-            MeasureDesc im = ic.getMeasures().get(i);
+        // To enable Spark in IT, the percentile measure was removed from the inner cube, so skip that measure in this comparison
+        List<MeasureDesc> lcMeasures = dropPercentile(lc.getMeasures());
+        List<MeasureDesc> icMeasures = ic.getMeasures();
+        
+        assertEquals(lcMeasures.size(), icMeasures.size());
+        for (int i = 0, n = lcMeasures.size(); i < n; i++) {
+            MeasureDesc lm = lcMeasures.get(i);
+            MeasureDesc im = icMeasures.get(i);
             assertEquals(lm.getName(), im.getName());
             assertEquals(lm.getFunction().getFullExpression(), im.getFunction().getFullExpression());
             assertEquals(lm.getFunction().getReturnType(), im.getFunction().getReturnType());
@@ -140,8 +143,24 @@ public class CubeDescTest extends LocalFileMetadataTestCase {
             assertArrayEquals(lag.getSelectRule().hierarchy_dims, iag.getSelectRule().hierarchy_dims);
             assertArrayEquals(lag.getSelectRule().joint_dims, iag.getSelectRule().joint_dims);
         }
+        
+        assertEquals(lc.listAllColumnDescs().size(), ic.listAllColumnDescs().size());
+        assertEquals(lc.listAllColumns().size(), ic.listAllColumns().size());
+        
+        // test KYLIN-2440
+        assertTrue(lc.listAllColumns().contains(lc.getModel().findColumn("SELLER_ACCOUNT.ACCOUNT_ID")));
+        assertTrue(ic.listAllColumns().contains(ic.getModel().findColumn("SELLER_ACCOUNT.ACCOUNT_ID")));
     }
     
+    private List<MeasureDesc> dropPercentile(List<MeasureDesc> measures) {
+        ArrayList<MeasureDesc> result = new ArrayList<>();
+        for (MeasureDesc m : measures) {
+            if (!m.getFunction().getExpression().toUpperCase().contains("PERCENTILE"))
+                result.add(m);
+        }
+        return result;
+    }
+
     @Test
     public void testGoodInit() throws Exception {
         CubeDesc cubeDesc = CubeDescManager.getInstance(getTestConfig()).getCubeDesc(CUBE_WITH_SLR_DESC);
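
Rather than keeping testCiCube ignored outright, the patch drops the percentile measure from the local cube's measure list before the comparison, so the remaining measures, aggregation groups and columns of the CI cube are still verified. For illustration only (the patch itself uses a plain loop), here is the same filter written over plain strings with Java 8 streams; the class and sample values are hypothetical:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    // Hypothetical sketch of the dropPercentile(...) filter above,
    // applied to strings instead of MeasureDesc objects.
    public class DropPercentileSketch {
        public static void main(String[] args) {
            List<String> expressions = Arrays.asList("SUM", "COUNT", "PERCENTILE_APPROX");
            List<String> kept = expressions.stream()
                    .filter(e -> !e.toUpperCase().contains("PERCENTILE"))
                    .collect(Collectors.toList());
            System.out.println(kept); // [SUM, COUNT]
        }
    }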


[07/13] kylin git commit: minor, refactor ITKylinQueryTest, move select * queries to sql files

Posted by li...@apache.org.
minor, refactor ITKylinQueryTest, move select * queries to sql files


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7ce3a153
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7ce3a153
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7ce3a153

Branch: refs/heads/master-hbase0.98
Commit: 7ce3a153cdb63b874e68b29518cd6bb0836a9d57
Parents: 36f716f
Author: Li Yang <li...@apache.org>
Authored: Tue Feb 21 17:29:13 2017 +0800
Committer: Li Yang <li...@apache.org>
Committed: Tue Feb 21 17:29:13 2017 +0800

----------------------------------------------------------------------
 .../apache/kylin/query/ITKylinQueryTest.java    |  9 +---
 .../org/apache/kylin/query/KylinTestBase.java   | 43 +++++++++++++++-----
 .../resources/query/sql_verifyCount/query01.sql | 19 +++++++++
 .../query/sql_verifyCount/query01.sql.expected  |  2 +
 .../resources/query/sql_verifyCount/query03.sql | 22 ++++++++++
 .../query/sql_verifyCount/query03.sql.expected  |  2 +
 .../query/sql_verifyCount/query04.sql.expected  |  1 +
 7 files changed, 79 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
index d30371a..842ce40 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/ITKylinQueryTest.java
@@ -210,7 +210,7 @@ public class ITKylinQueryTest extends KylinTestBase {
 
     @Test
     public void testVerifyCountQuery() throws Exception {
-        verifyResultRowCount(getQueryFolderPrefix() + "src/test/resources/query/sql_verifyCount");
+        verifyResultRowColCount(getQueryFolderPrefix() + "src/test/resources/query/sql_verifyCount");
     }
 
     @Test
@@ -393,13 +393,6 @@ public class ITKylinQueryTest extends KylinTestBase {
     }
 
     @Test
-    public void testSelectStarColumnCount() throws Exception {
-        execAndCompColumnCount("select * from test_kylin_fact limit 10", 11);
-        execAndCompColumnCount("select * from test_kylin_fact", 11);
-        execAndCompColumnCount("select * from     test_kylin_fact left join edw.test_cal_dt on test_kylin_fact.cal_dt = edw.test_cal_dt.CAL_DT    limit 10", 13);
-    }
-
-    @Test
     public void testPercentileQuery() throws Exception {
         batchExecuteQuery(getQueryFolderPrefix() + "src/test/resources/query/sql_percentile");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
index d83ad75..402aaa0 100644
--- a/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
+++ b/kylin-it/src/test/java/org/apache/kylin/query/KylinTestBase.java
@@ -46,6 +46,7 @@ import java.util.logging.LogManager;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.query.relnode.OLAPContext;
 import org.apache.kylin.query.routing.rules.RemoveBlackoutRealizationsRule;
@@ -233,7 +234,7 @@ public class KylinTestBase {
             for (int i = 0; i < columnNames.length; i++) {
                 columnNames[i] = queryTable.getTableMetaData().getColumns()[i].getColumnName();
             }
-            
+
             queryTable = new SortedTable(queryTable, columnNames);
         }
         if (PRINT_RESULT)
@@ -337,7 +338,7 @@ public class KylinTestBase {
         }
     }
 
-    protected void verifyResultRowCount(String queryFolder) throws Exception {
+    protected void verifyResultRowColCount(String queryFolder) throws Exception {
         printInfo("---------- verify result count in folder: " + queryFolder);
 
         List<File> sqlFiles = getFilesFromFolder(new File(queryFolder), ".sql");
@@ -346,7 +347,9 @@ public class KylinTestBase {
             String sql = getTextFromFile(sqlFile);
 
             File expectResultFile = new File(sqlFile.getParent(), sqlFile.getName() + ".expected");
-            int expectRowCount = Integer.parseInt(Files.readFirstLine(expectResultFile, Charset.defaultCharset()));
+            Pair<Integer, Integer> pair = getExpectedRowAndCol(expectResultFile);
+            int expectRowCount = pair.getFirst();
+            int expectColCount = pair.getSecond();
 
             // execute Kylin
             printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
@@ -354,11 +357,29 @@ public class KylinTestBase {
             ITable kylinTable = executeQuery(kylinConn, queryName, sql, false);
 
             // compare the result
-            Assert.assertEquals(queryName, expectRowCount, kylinTable.getRowCount());
-            // assertTableEquals(expectRowCount, kylinTable.getRowCount());
+            if (expectRowCount >= 0)
+                Assert.assertEquals(queryName, expectRowCount, kylinTable.getRowCount());
+
+            if (expectColCount >= 0)
+                Assert.assertEquals(queryName, expectColCount, kylinTable.getTableMetaData().getColumns().length);
         }
     }
 
+    private Pair<Integer, Integer> getExpectedRowAndCol(File expectResultFile) throws IOException {
+        List<String> lines = Files.readLines(expectResultFile, Charset.forName("UTF-8"));
+        int row = -1;
+        int col = -1;
+        try {
+            row = Integer.parseInt(lines.get(0).trim());
+        } catch (Exception ex) { // row line missing or non-numeric (e.g. "*"); keep -1 so the row check is skipped
+        }
+        try {
+            col = Integer.parseInt(lines.get(1).trim());
+        } catch (Exception ex) { // column line missing or non-numeric; keep -1 so the column check is skipped
+        }
+        return Pair.newPair(row, col);
+    }
+
     protected void verifyResultContent(String queryFolder) throws Exception {
         printInfo("---------- verify result content in folder: " + queryFolder);
 
@@ -370,7 +391,7 @@ public class KylinTestBase {
             File expectResultFile = new File(sqlFile.getParent(), sqlFile.getName() + ".expected.xml");
             IDataSet expect = new FlatXmlDataSetBuilder().build(expectResultFile);
             // Get expected table named "expect". FIXME Only support default table name
-            ITable expectTable = expect.getTable("expect");   
+            ITable expectTable = expect.getTable("expect");
 
             // execute Kylin
             printInfo("Query Result from Kylin - " + queryName + "  (" + queryFolder + ")");
@@ -421,13 +442,13 @@ public class KylinTestBase {
     protected void execAndCompColumnCount(String input, int expectedColumnCount) throws Exception {
         printInfo("---------- test column count: " + input);
         Set<String> sqlSet = ImmutableSet.of(input);
-        
+
         for (String sql : sqlSet) {
             // execute Kylin
             printInfo("Query Result from Kylin - " + sql);
             IDatabaseConnection kylinConn = new DatabaseConnection(cubeConnection);
             ITable kylinTable = executeQuery(kylinConn, sql, sql, false);
-            
+
             try {
                 // compare the result
                 Assert.assertEquals(expectedColumnCount, kylinTable.getTableMetaData().getColumns().length);
@@ -437,7 +458,7 @@ public class KylinTestBase {
             }
         }
     }
-    
+
     protected void execLimitAndValidate(String queryFolder) throws Exception {
         printInfo("---------- test folder: " + new File(queryFolder).getAbsolutePath());
 
@@ -543,13 +564,13 @@ public class KylinTestBase {
             assertTableEquals(h2Table, kylinTable);
         }
     }
-    
+
     protected void assertTableEquals(ITable h2Table, ITable kylinTable) throws DatabaseUnitException {
         HackedDbUnitAssert dbUnit = new HackedDbUnitAssert();
         dbUnit.hackIgnoreIntBigIntMismatch();
         dbUnit.assertEquals(h2Table, kylinTable);
     }
-    
+
     protected void assertTableContains(ITable h2Table, ITable kylinTable) throws DatabaseUnitException {
         HackedDbUnitAssert dbUnit = new HackedDbUnitAssert();
         dbUnit.hackIgnoreIntBigIntMismatch();
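
The .expected files consumed above may now carry two lines: line 1 is the expected row count, line 2 the expected column count, and any missing or non-numeric line parses to -1, which verifyResultRowColCount treats as "skip this assertion". A minimal, self-contained sketch of that contract (class and method names below are mine, not Kylin's):

    import java.util.Arrays;
    import java.util.List;

    // Hypothetical sketch mirroring getExpectedRowAndCol(...) above.
    public class ExpectedFileSketch {

        // Returns the integer on the given line, or -1 when the line is
        // absent or non-numeric (e.g. "*"), exactly like the try/catch
        // blocks in getExpectedRowAndCol.
        static int parseOrSkip(List<String> lines, int idx) {
            try {
                return Integer.parseInt(lines.get(idx).trim());
            } catch (Exception ex) {
                return -1;
            }
        }

        public static void main(String[] args) {
            List<String> query01 = Arrays.asList("*", "11");   // query01.sql.expected
            System.out.println(parseOrSkip(query01, 0));       // -1: row count not asserted
            System.out.println(parseOrSkip(query01, 1));       // 11: column count asserted

            List<String> oldQuery04 = Arrays.asList("100");    // query04 before this commit
            System.out.println(parseOrSkip(oldQuery04, 1));    // -1: no column line yet
        }
    }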

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql b/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql
new file mode 100644
index 0000000..ae25894
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql
@@ -0,0 +1,19 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select * from test_kylin_fact

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql.expected
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql.expected b/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql.expected
new file mode 100644
index 0000000..86242f2
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_verifyCount/query01.sql.expected
@@ -0,0 +1,2 @@
+*
+11
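
Here the "*" on the first line parses to -1, so the row count of this unbounded select * is deliberately not asserted, while the 11 on the second line pins the column count of test_kylin_fact, the same figure the removed inline assertion used.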

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql b/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql
new file mode 100644
index 0000000..93ef871
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql
@@ -0,0 +1,22 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+select * 
+from
+test_kylin_fact left join edw.test_cal_dt on test_kylin_fact.cal_dt = edw.test_cal_dt.CAL_DT
+limit 10

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql.expected
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql.expected b/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql.expected
new file mode 100644
index 0000000..093e327
--- /dev/null
+++ b/kylin-it/src/test/resources/query/sql_verifyCount/query03.sql.expected
@@ -0,0 +1,2 @@
+10
+13
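
query03 reproduces the left-join case that previously lived inline in testSelectStarColumnCount: 10 rows from the "limit 10" clause and 13 columns in total, presumably the 11 columns of test_kylin_fact plus two exposed from edw.test_cal_dt.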

http://git-wip-us.apache.org/repos/asf/kylin/blob/7ce3a153/kylin-it/src/test/resources/query/sql_verifyCount/query04.sql.expected
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/resources/query/sql_verifyCount/query04.sql.expected b/kylin-it/src/test/resources/query/sql_verifyCount/query04.sql.expected
index 29d6383..71011db 100644
--- a/kylin-it/src/test/resources/query/sql_verifyCount/query04.sql.expected
+++ b/kylin-it/src/test/resources/query/sql_verifyCount/query04.sql.expected
@@ -1 +1,2 @@
 100
+11
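
query04.sql.expected previously held only the expected row count (100); the new second line adds the expected column count (11), exercising the optional column check that verifyResultRowColCount introduced above.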