You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by li...@apache.org on 2016/09/07 03:41:59 UTC
[01/10] kylin git commit: KYLIN-1992 Clear ThreadLocal Contexts when
query failed before scanning HBase [Forced Update!]
Repository: kylin
Updated Branches:
refs/heads/1.5.x-HBase1.x f6ee0e7c8 -> b70684e66 (forced update)
KYLIN-1992 Clear ThreadLocal Contexts when query failed before scanning HBase
Signed-off-by: Hongbin Ma <ma...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/663820f1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/663820f1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/663820f1
Branch: refs/heads/1.5.x-HBase1.x
Commit: 663820f1a267c9ff156ffbe018ac1d95a1ede7cc
Parents: 67bcec2
Author: kangkaisen <ka...@live.com>
Authored: Thu Sep 1 16:02:42 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Sun Sep 4 21:10:51 2016 +0800
----------------------------------------------------------------------
.../src/main/java/org/apache/kylin/rest/service/QueryService.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/663820f1/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 3acaeb8..df296cf 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -316,6 +316,8 @@ public class QueryService extends BasicService {
parameters.put(OLAPContext.PRM_USER_AUTHEN_INFO, userInfo);
parameters.put(OLAPContext.PRM_ACCEPT_PARTIAL_RESULT, String.valueOf(sqlRequest.isAcceptPartial()));
OLAPContext.setParameters(parameters);
+ // force clear the query context before a new query
+ OLAPContext.clearThreadLocalContexts();
return execute(correctedSql, sqlRequest);
[09/10] kylin git commit: KYLIN-1528 Create a branch for v1.5 with
HBase 1.x API
Posted by li...@apache.org.
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index 9b487a7..4a4f2a3 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -25,7 +25,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -78,7 +79,8 @@ public class CubeHTableUtil {
tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin admin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
+ Admin admin = conn.getAdmin();
try {
if (User.isHBaseSecurityEnabled(conf)) {
@@ -91,7 +93,7 @@ public class CubeHTableUtil {
tableDesc.addFamily(cf);
}
- if (admin.tableExists(tableName)) {
+ if (admin.tableExists(TableName.valueOf(tableName))) {
// admin.disableTable(tableName);
// admin.deleteTable(tableName);
throw new RuntimeException("HBase table " + tableName + " exists!");
@@ -100,7 +102,7 @@ public class CubeHTableUtil {
DeployCoprocessorCLI.deployCoprocessor(tableDesc);
admin.createTable(tableDesc, splitKeys);
- Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
+ Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
logger.info("create hbase table " + tableName + " done.");
} finally {
admin.close();
@@ -109,8 +111,7 @@ public class CubeHTableUtil {
}
public static void deleteHTable(TableName tableName) throws IOException {
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin admin = new HBaseAdmin(conf);
+ Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
try {
if (admin.tableExists(tableName)) {
logger.info("disabling hbase table " + tableName);
@@ -125,8 +126,7 @@ public class CubeHTableUtil {
/** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin admin = new HBaseAdmin(conf);
+ Admin admin = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getAdmin();
try {
if (admin.tableExists(tableName)) {
logger.info("disabling hbase table " + tableName);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index 7aecd7e..9dc9715 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -28,9 +28,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.engine.mr.HadoopUtil;
import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.execution.AbstractExecutable;
@@ -99,19 +100,21 @@ public class DeprecatedGCStep extends AbstractExecutable {
List<String> oldTables = getOldHTables();
if (oldTables != null && oldTables.size() > 0) {
String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin admin = null;
+ Admin admin = null;
try {
- admin = new HBaseAdmin(conf);
+
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ admin = conn.getAdmin();
+
for (String table : oldTables) {
- if (admin.tableExists(table)) {
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
+ if (admin.tableExists(TableName.valueOf(table))) {
+ HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf(table));
String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
if (metadataUrlPrefix.equalsIgnoreCase(host)) {
- if (admin.isTableEnabled(table)) {
- admin.disableTable(table);
+ if (admin.isTableEnabled(TableName.valueOf(table))) {
+ admin.disableTable(TableName.valueOf(table));
}
- admin.deleteTable(table);
+ admin.deleteTable(TableName.valueOf(table));
logger.debug("Dropped HBase table " + table);
output.append("Dropped HBase table " + table + " \n");
} else {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index 4fe7748..65cf205 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -38,8 +38,8 @@ import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.ImmutableBitSet;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.cuboid.Cuboid;
@@ -64,7 +64,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
private final List<KeyValueCreator> keyValueCreators;
private final int nColumns;
- private final HTableInterface hTable;
+ private final Table hTable;
private final CubeDesc cubeDesc;
private final CubeSegment cubeSegment;
private final Object[] measureValues;
@@ -73,7 +73,7 @@ public class HBaseCuboidWriter implements ICuboidWriter {
private AbstractRowKeyEncoder rowKeyEncoder;
private byte[] keybuf;
- public HBaseCuboidWriter(CubeSegment segment, HTableInterface hTable) {
+ public HBaseCuboidWriter(CubeSegment segment, Table hTable) {
this.keyValueCreators = Lists.newArrayList();
this.cubeSegment = segment;
this.cubeDesc = cubeSegment.getCubeDesc();
@@ -132,7 +132,6 @@ public class HBaseCuboidWriter implements ICuboidWriter {
long t = System.currentTimeMillis();
if (hTable != null) {
hTable.put(puts);
- hTable.flushCommits();
}
logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
puts.clear();
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
index 9adaf24..e1e2cba 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
@@ -27,7 +27,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ResourceStore;
import org.apache.kylin.cube.CubeSegment;
@@ -56,7 +57,7 @@ public class HBaseStreamingOutput implements IStreamingOutput {
try {
CubeSegment cubeSegment = (CubeSegment) buildable;
- final HTableInterface hTable;
+ final Table hTable;
hTable = createHTable(cubeSegment);
List<ICuboidWriter> cuboidWriters = Lists.newArrayList();
cuboidWriters.add(new HBaseCuboidWriter(cubeSegment, hTable));
@@ -88,10 +89,10 @@ public class HBaseStreamingOutput implements IStreamingOutput {
}
}
- private HTableInterface createHTable(final CubeSegment cubeSegment) throws IOException {
+ private Table createHTable(final CubeSegment cubeSegment) throws IOException {
final String hTableName = cubeSegment.getStorageLocationIdentifier();
CubeHTableUtil.createHTable(cubeSegment, null);
- final HTableInterface hTable = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getTable(hTableName);
+ final Table hTable = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl()).getTable(TableName.valueOf(hTableName));
logger.info("hTable:" + hTableName + " for segment:" + cubeSegment.getName() + " created!");
return hTable;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index 5b2441c..2f7e164 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -24,11 +24,11 @@ import java.util.Collections;
import java.util.List;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableContext;
@@ -69,19 +69,20 @@ public class MergeGCStep extends AbstractExecutable {
List<String> oldTables = getOldHTables();
if (oldTables != null && oldTables.size() > 0) {
String metadataUrlPrefix = KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix();
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin admin = null;
+ Admin admin = null;
try {
- admin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ admin = conn.getAdmin();
+
for (String table : oldTables) {
- if (admin.tableExists(table)) {
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(Bytes.toBytes(table));
+ if (admin.tableExists(TableName.valueOf(table))) {
+ HTableDescriptor tableDescriptor = admin.getTableDescriptor(TableName.valueOf((table)));
String host = tableDescriptor.getValue(IRealizationConstants.HTableTag);
if (metadataUrlPrefix.equalsIgnoreCase(host)) {
- if (admin.isTableEnabled(table)) {
- admin.disableTable(table);
+ if (admin.isTableEnabled(TableName.valueOf(table))) {
+ admin.disableTable(TableName.valueOf(table));
}
- admin.deleteTable(table);
+ admin.deleteTable(TableName.valueOf(table));
logger.debug("Dropped htable: " + table);
output.append("HBase table " + table + " is dropped. \n");
} else {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index a150607..56f867a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -21,9 +21,11 @@ package org.apache.kylin.storage.hbase.util;
import java.io.IOException;
import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -38,8 +40,8 @@ public class CleanHtableCLI extends AbstractApplication {
protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
private void clean() throws IOException {
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
for (HTableDescriptor descriptor : hbaseAdmin.listTables()) {
String name = descriptor.getNameAsString().toLowerCase();
@@ -50,7 +52,7 @@ public class CleanHtableCLI extends AbstractApplication {
System.out.println();
descriptor.setValue(IRealizationConstants.HTableOwner, "DL-eBay-Kylin@ebay.com");
- hbaseAdmin.modifyTable(descriptor.getNameAsString(), descriptor);
+ hbaseAdmin.modifyTable(TableName.valueOf(descriptor.getNameAsString()), descriptor);
}
}
hbaseAdmin.close();
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index 3066fb5..45c6b91 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -33,12 +33,13 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.RawResource;
@@ -86,7 +87,7 @@ public class CubeMigrationCLI {
private static ResourceStore srcStore;
private static ResourceStore dstStore;
private static FileSystem hdfsFS;
- private static HBaseAdmin hbaseAdmin;
+ private static Admin hbaseAdmin;
public static final String ACL_INFO_FAMILY = "i";
private static final String ACL_TABLE_NAME = "_acl";
@@ -130,8 +131,8 @@ public class CubeMigrationCLI {
checkAndGetHbaseUrl();
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(srcConfig.getStorageUrl());
+ hbaseAdmin = conn.getAdmin();
hdfsFS = FileSystem.get(new Configuration());
@@ -229,6 +230,7 @@ public class CubeMigrationCLI {
operations.add(new Opt(OptType.COPY_DICT_OR_SNAPSHOT, new Object[] { item, cube.getName() }));
}
}
+
private static void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
String projectResPath = ProjectInstance.concatResourcePath(projectName);
if (!dstStore.exists(projectResPath))
@@ -322,8 +324,8 @@ public class CubeMigrationCLI {
switch (opt.type) {
case CHANGE_HTABLE_HOST: {
- String tableName = (String) opt.params[0];
- HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+ TableName tableName = TableName.valueOf((String) opt.params[0]);
+ HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
hbaseAdmin.disableTable(tableName);
desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
@@ -445,11 +447,11 @@ public class CubeMigrationCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- HTableInterface srcAclHtable = null;
- HTableInterface destAclHtable = null;
+ Table srcAclHtable = null;
+ Table destAclHtable = null;
try {
- srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+ srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
// cube acl
Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -469,7 +471,6 @@ public class CubeMigrationCLI {
destAclHtable.put(put);
}
}
- destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(srcAclHtable);
IOUtils.closeQuietly(destAclHtable);
@@ -500,8 +501,8 @@ public class CubeMigrationCLI {
switch (opt.type) {
case CHANGE_HTABLE_HOST: {
- String tableName = (String) opt.params[0];
- HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+ TableName tableName = TableName.valueOf((String) opt.params[0]);
+ HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName);
hbaseAdmin.disableTable(tableName);
desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
@@ -535,13 +536,12 @@ public class CubeMigrationCLI {
case COPY_ACL: {
String cubeId = (String) opt.params[0];
String modelId = (String) opt.params[1];
- HTableInterface destAclHtable = null;
+ Table destAclHtable = null;
try {
- destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+ destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
- destAclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(destAclHtable);
}
@@ -558,7 +558,7 @@ public class CubeMigrationCLI {
}
}
- private static void updateMeta(KylinConfig config){
+ private static void updateMeta(KylinConfig config) {
String[] nodes = config.getRestServers();
for (String node : nodes) {
RestClient restClient = new RestClient(node);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index 295750a..efda4e4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -26,10 +26,10 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.cube.CubeInstance;
@@ -60,7 +60,7 @@ public class CubeMigrationCheckCLI {
private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
private KylinConfig dstCfg;
- private HBaseAdmin hbaseAdmin;
+ private Admin hbaseAdmin;
private List<String> issueExistHTables;
private List<String> inconsistentHTables;
@@ -128,9 +128,8 @@ public class CubeMigrationCheckCLI {
this.dstCfg = kylinConfig;
this.ifFix = isFix;
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- hbaseAdmin = new HBaseAdmin(conf);
-
+ Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
+ hbaseAdmin = conn.getAdmin();
issueExistHTables = Lists.newArrayList();
inconsistentHTables = Lists.newArrayList();
}
@@ -187,10 +186,10 @@ public class CubeMigrationCheckCLI {
String[] sepNameList = segFullName.split(",");
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
- hbaseAdmin.disableTable(sepNameList[0]);
+ hbaseAdmin.disableTable(TableName.valueOf(sepNameList[0]));
desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
- hbaseAdmin.modifyTable(sepNameList[0], desc);
- hbaseAdmin.enableTable(sepNameList[0]);
+ hbaseAdmin.modifyTable(TableName.valueOf(sepNameList[0]), desc);
+ hbaseAdmin.enableTable(TableName.valueOf(sepNameList[0]));
}
} else {
logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index a1193e7..0aff276 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -40,7 +40,8 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.KylinVersion;
@@ -76,7 +77,8 @@ public class DeployCoprocessorCLI {
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
FileSystem fileSystem = FileSystem.get(hconf);
- HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf);
+ Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
String localCoprocessorJar;
if ("default".equals(args[0])) {
@@ -159,10 +161,10 @@ public class DeployCoprocessorCLI {
public static void deployCoprocessor(HTableDescriptor tableDesc) {
try {
initHTableCoprocessor(tableDesc);
- logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
+ logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
} catch (Exception ex) {
- logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
+ logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
logger.error("Will try creating the table without coprocessor.");
}
}
@@ -184,9 +186,9 @@ public class DeployCoprocessorCLI {
desc.addCoprocessor(CubeObserverClass, hdfsCoprocessorJar, 1002, null);
}
- public static void resetCoprocessor(String tableName, HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
+ public static void resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
logger.info("Disable " + tableName);
- hbaseAdmin.disableTable(tableName);
+ hbaseAdmin.disableTable(TableName.valueOf(tableName));
logger.info("Unset coprocessor on " + tableName);
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
@@ -214,13 +216,13 @@ public class DeployCoprocessorCLI {
desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
}
- hbaseAdmin.modifyTable(tableName, desc);
+ hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
logger.info("Enable " + tableName);
- hbaseAdmin.enableTable(tableName);
+ hbaseAdmin.enableTable(TableName.valueOf(tableName));
}
- private static List<String> resetCoprocessorOnHTables(HBaseAdmin hbaseAdmin, Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
+ private static List<String> resetCoprocessorOnHTables(Admin hbaseAdmin, Path hdfsCoprocessorJar, List<String> tableNames) throws IOException {
List<String> processed = new ArrayList<String>();
for (String tableName : tableNames) {
@@ -331,7 +333,7 @@ public class DeployCoprocessorCLI {
return coprocessorDir;
}
- private static Set<String> getCoprocessorJarPaths(HBaseAdmin hbaseAdmin, List<String> tableNames) throws IOException {
+ private static Set<String> getCoprocessorJarPaths(Admin hbaseAdmin, List<String> tableNames) throws IOException {
HashSet<String> result = new HashSet<String>();
for (String tableName : tableNames) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
index a5a85fa..29ca7b2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/ExtendCubeToHybridCLI.java
@@ -26,10 +26,11 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.JsonSerializer;
import org.apache.kylin.common.persistence.ResourceStore;
@@ -231,9 +232,9 @@ public class ExtendCubeToHybridCLI {
Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
ProjectInstance project = store.getResource(projectResPath, ProjectInstance.class, projectSerializer);
String projUUID = project.getUuid();
- HTableInterface aclHtable = null;
+ Table aclHtable = null;
try {
- aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(kylinConfig.getMetadataUrlPrefix() + "_acl");
+ aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
// cube acl
Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -253,7 +254,6 @@ public class ExtendCubeToHybridCLI {
aclHtable.put(put);
}
}
- aclHtable.flushCommits();
} finally {
IOUtils.closeQuietly(aclHtable);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index 86ba22f..dd5f8fa 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -28,13 +28,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.Pair;
@@ -75,7 +75,7 @@ public class GridTableHBaseBenchmark {
System.out.println("Testing grid table scanning, hit ratio " + hitRatio + ", index ratio " + indexRatio);
String hbaseUrl = "hbase"; // use hbase-site.xml on classpath
- HConnection conn = HBaseConnection.get(hbaseUrl);
+ Connection conn = HBaseConnection.get(hbaseUrl);
createHTableIfNeeded(conn, TEST_TABLE);
prepareData(conn);
@@ -91,10 +91,10 @@ public class GridTableHBaseBenchmark {
}
- private static void testColumnScan(HConnection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
+ private static void testColumnScan(Connection conn, List<Pair<Integer, Integer>> colScans) throws IOException {
Stats stats = new Stats("COLUMN_SCAN");
- HTableInterface table = conn.getTable(TEST_TABLE);
+ Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
try {
stats.markStart();
@@ -122,20 +122,20 @@ public class GridTableHBaseBenchmark {
}
}
- private static void testRowScanNoIndexFullScan(HConnection conn, boolean[] hits) throws IOException {
+ private static void testRowScanNoIndexFullScan(Connection conn, boolean[] hits) throws IOException {
fullScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_FULL"));
}
- private static void testRowScanNoIndexSkipScan(HConnection conn, boolean[] hits) throws IOException {
+ private static void testRowScanNoIndexSkipScan(Connection conn, boolean[] hits) throws IOException {
jumpScan(conn, hits, new Stats("ROW_SCAN_NO_IDX_SKIP"));
}
- private static void testRowScanWithIndex(HConnection conn, boolean[] hits) throws IOException {
+ private static void testRowScanWithIndex(Connection conn, boolean[] hits) throws IOException {
jumpScan(conn, hits, new Stats("ROW_SCAN_IDX"));
}
- private static void fullScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
- HTableInterface table = conn.getTable(TEST_TABLE);
+ private static void fullScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
+ Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
try {
stats.markStart();
@@ -156,11 +156,11 @@ public class GridTableHBaseBenchmark {
}
}
- private static void jumpScan(HConnection conn, boolean[] hits, Stats stats) throws IOException {
+ private static void jumpScan(Connection conn, boolean[] hits, Stats stats) throws IOException {
final int jumpThreshold = 6; // compensate for Scan() overhead, totally by experience
- HTableInterface table = conn.getTable(TEST_TABLE);
+ Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
try {
stats.markStart();
@@ -204,8 +204,8 @@ public class GridTableHBaseBenchmark {
}
}
- private static void prepareData(HConnection conn) throws IOException {
- HTableInterface table = conn.getTable(TEST_TABLE);
+ private static void prepareData(Connection conn) throws IOException {
+ Table table = conn.getTable(TableName.valueOf(TEST_TABLE));
try {
// check how many rows existing
@@ -258,8 +258,8 @@ public class GridTableHBaseBenchmark {
return bytes;
}
- private static void createHTableIfNeeded(HConnection conn, String tableName) throws IOException {
- HBaseAdmin hbase = new HBaseAdmin(conn);
+ private static void createHTableIfNeeded(Connection conn, String tableName) throws IOException {
+ Admin hbase = conn.getAdmin();
try {
boolean tableExist = false;
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 6749d6c..940d64a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -24,9 +24,11 @@ import java.util.List;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
@@ -55,8 +57,8 @@ public class HBaseClean extends AbstractApplication {
private void cleanUp() {
try {
// get all kylin hbase tables
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = Lists.newArrayList();
@@ -71,12 +73,12 @@ public class HBaseClean extends AbstractApplication {
// drop tables
for (String htableName : allTablesNeedToBeDropped) {
logger.info("Deleting HBase table " + htableName);
- if (hbaseAdmin.tableExists(htableName)) {
- if (hbaseAdmin.isTableEnabled(htableName)) {
- hbaseAdmin.disableTable(htableName);
+ if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
+ if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
+ hbaseAdmin.disableTable(TableName.valueOf(htableName));
}
- hbaseAdmin.deleteTable(htableName);
+ hbaseAdmin.deleteTable(TableName.valueOf(htableName));
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
index 346c3a2..3ae411b 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseRegionSizeCalculator.java
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
@@ -30,12 +31,15 @@ import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kylin.common.util.Pair;
import org.slf4j.Logger;
@@ -57,30 +61,31 @@ public class HBaseRegionSizeCalculator {
/**
* Computes size of each region for table and given column families.
* */
- public HBaseRegionSizeCalculator(HTable table) throws IOException {
- this(table, new HBaseAdmin(table.getConfiguration()));
- }
-
- /** Constructor for unit testing */
- HBaseRegionSizeCalculator(HTable table, HBaseAdmin hBaseAdmin) throws IOException {
+ public HBaseRegionSizeCalculator(String tableName, Connection hbaseConnection) throws IOException {
+ Table table = null;
+ Admin admin = null;
try {
+ table = hbaseConnection.getTable(TableName.valueOf(tableName));
+ admin = hbaseConnection.getAdmin();
+
if (!enabled(table.getConfiguration())) {
logger.info("Region size calculation disabled.");
return;
}
- logger.info("Calculating region sizes for table \"" + new String(table.getTableName()) + "\".");
+ logger.info("Calculating region sizes for table \"" + table.getName() + "\".");
// Get regions for table.
- Set<HRegionInfo> tableRegionInfos = table.getRegionLocations().keySet();
+ RegionLocator regionLocator = hbaseConnection.getRegionLocator(table.getName());
+ List<HRegionLocation> regionLocationList = regionLocator.getAllRegionLocations();
Set<byte[]> tableRegions = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
- for (HRegionInfo regionInfo : tableRegionInfos) {
- tableRegions.add(regionInfo.getRegionName());
+ for (HRegionLocation hRegionLocation : regionLocationList) {
+ tableRegions.add(hRegionLocation.getRegionInfo().getRegionName());
}
- ClusterStatus clusterStatus = hBaseAdmin.getClusterStatus();
+ ClusterStatus clusterStatus = admin.getClusterStatus();
Collection<ServerName> servers = clusterStatus.getServers();
final long megaByte = 1024L * 1024L;
@@ -104,7 +109,7 @@ public class HBaseRegionSizeCalculator {
}
}
} finally {
- hBaseAdmin.close();
+ admin.close();
}
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index 266f7e7..a2f60d4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -23,9 +23,10 @@ import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.realization.IRealizationConstants;
import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -42,8 +43,8 @@ public class HBaseUsage {
Map<String, List<String>> envs = Maps.newHashMap();
// get all kylin hbase tables
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
for (HTableDescriptor desc : tableDescriptors) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index f30f2c9..542df39 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -31,15 +31,15 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
@@ -57,11 +57,11 @@ public class HbaseStreamingInput {
private static final byte[] QN = "C".getBytes();
public static void createTable(String tableName) throws IOException {
- HConnection conn = getConnection();
- HBaseAdmin hadmin = new HBaseAdmin(conn);
+ Connection conn = getConnection();
+ Admin hadmin = conn.getAdmin();
try {
- boolean tableExist = hadmin.tableExists(tableName);
+ boolean tableExist = hadmin.tableExists(TableName.valueOf(tableName));
if (tableExist) {
logger.info("HTable '" + tableName + "' already exists");
return;
@@ -118,8 +118,8 @@ public class HbaseStreamingInput {
e.printStackTrace();
}
- HConnection conn = getConnection();
- HTableInterface table = conn.getTable(tableName);
+ Connection conn = getConnection();
+ Table table = conn.getTable(TableName.valueOf(tableName));
byte[] key = new byte[8 + 4];//time + id
@@ -134,7 +134,7 @@ public class HbaseStreamingInput {
Bytes.putInt(key, 8, i);
Put put = new Put(key);
byte[] cell = randomBytes(CELL_SIZE);
- put.add(CF, QN, cell);
+ put.addColumn(CF, QN, cell);
buffer.add(put);
}
table.put(buffer);
@@ -169,8 +169,8 @@ public class HbaseStreamingInput {
}
Random r = new Random();
- HConnection conn = getConnection();
- HTableInterface table = conn.getTable(tableName);
+ Connection conn = getConnection();
+ Table table = conn.getTable(TableName.valueOf(tableName));
long leftBound = getFirstKeyTime(table);
long rightBound = System.currentTimeMillis();
@@ -205,7 +205,7 @@ public class HbaseStreamingInput {
}
}
- private static long getFirstKeyTime(HTableInterface table) throws IOException {
+ private static long getFirstKeyTime(Table table) throws IOException {
long startTime = 0;
Scan scan = new Scan();
@@ -223,8 +223,8 @@ public class HbaseStreamingInput {
}
- private static HConnection getConnection() throws IOException {
- return HConnectionManager.createConnection(HBaseConnection.getCurrentHBaseConfiguration());
+ private static Connection getConnection() throws IOException {
+ return HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
}
private static String formatTime(long time) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index ca1a060..ea05ab2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -23,10 +23,11 @@ import java.io.IOException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.engine.mr.common.BatchConstants;
@@ -50,8 +51,8 @@ public class HtableAlterMetadataCLI extends AbstractApplication {
String metadataValue;
private void alter() throws IOException {
- Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
HTableDescriptor table = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(table.getTableName());
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index 8ff5b0f..df4e912 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -30,10 +30,14 @@ import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -52,9 +56,9 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
-
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
// get all kylin hbase tables
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Admin hbaseAdmin = conn.getAdmin();
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -73,12 +77,13 @@ public class OrphanHBaseCleanJob extends AbstractApplication {
// drop tables
for (String htableName : allTablesNeedToBeDropped) {
logger.info("Deleting HBase table " + htableName);
- if (hbaseAdmin.tableExists(htableName)) {
- if (hbaseAdmin.isTableEnabled(htableName)) {
- hbaseAdmin.disableTable(htableName);
+ TableName tableName = TableName.valueOf(htableName);
+ if (hbaseAdmin.tableExists(tableName)) {
+ if (hbaseAdmin.isTableEnabled(tableName)) {
+ hbaseAdmin.disableTable(tableName);
}
- hbaseAdmin.deleteTable(htableName);
+ hbaseAdmin.deleteTable(tableName);
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
index 58ef7cb..63b5a42 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/PingHBaseCLI.java
@@ -21,12 +21,13 @@ package org.apache.kylin.storage.hbase.util;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.TokenUtil;
import org.apache.hadoop.security.UserGroupInformation;
@@ -57,12 +58,12 @@ public class PingHBaseCLI {
Scan scan = new Scan();
int limit = 20;
- HConnection conn = null;
- HTableInterface table = null;
+ Connection conn = null;
+ Table table = null;
ResultScanner scanner = null;
try {
- conn = HConnectionManager.createConnection(hconf);
- table = conn.getTable(hbaseTable);
+ conn = ConnectionFactory.createConnection(hconf);
+ table = conn.getTable(TableName.valueOf(hbaseTable));
scanner = table.getScanner(scan);
int count = 0;
for (Result r : scanner) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index 01edb1f..db516bb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,11 +22,12 @@ import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.storage.hbase.HBaseConnection;
@@ -70,8 +71,8 @@ public class RowCounterCLI {
logger.info("My Scan " + scan.toString());
- HConnection conn = HConnectionManager.createConnection(conf);
- HTableInterface tableInterface = conn.getTable(htableName);
+ Connection conn = ConnectionFactory.createConnection(conf);
+ Table tableInterface = conn.getTable(TableName.valueOf(htableName));
Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
int counter = 0;
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 4bd2c53..1681050 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -40,7 +40,9 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.CliCommandExecutor;
@@ -54,6 +56,7 @@ import org.apache.kylin.job.engine.JobEngineConfig;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.manager.ExecutableManager;
import org.apache.kylin.metadata.realization.IRealizationConstants;
+import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -72,7 +75,8 @@ public class StorageCleanupJob extends AbstractApplication {
CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
long TIME_THREADSHOLD = KylinConfig.getInstanceFromEnv().getStorageCleanupTimeThreshold();
// get all kylin hbase tables
- HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Admin hbaseAdmin = conn.getAdmin();
String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = new ArrayList<String>();
@@ -150,22 +154,22 @@ public class StorageCleanupJob extends AbstractApplication {
}
class DeleteHTableRunnable implements Callable {
- HBaseAdmin hbaseAdmin;
+ Admin hbaseAdmin;
String htableName;
- DeleteHTableRunnable(HBaseAdmin hbaseAdmin, String htableName) {
+ DeleteHTableRunnable(Admin hbaseAdmin, String htableName) {
this.hbaseAdmin = hbaseAdmin;
this.htableName = htableName;
}
public Object call() throws Exception {
logger.info("Deleting HBase table " + htableName);
- if (hbaseAdmin.tableExists(htableName)) {
- if (hbaseAdmin.isTableEnabled(htableName)) {
- hbaseAdmin.disableTable(htableName);
+ if (hbaseAdmin.tableExists(TableName.valueOf(htableName))) {
+ if (hbaseAdmin.isTableEnabled(TableName.valueOf(htableName))) {
+ hbaseAdmin.disableTable(TableName.valueOf(htableName));
}
- hbaseAdmin.deleteTable(htableName);
+ hbaseAdmin.deleteTable(TableName.valueOf(htableName));
logger.info("Deleted HBase table " + htableName);
} else {
logger.info("HBase table" + htableName + " does not exist");
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index e36f662..42a54c8 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -24,16 +24,18 @@ import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.metadata.model.SegmentStatusEnum;
import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -49,14 +51,15 @@ public class UpdateHTableHostCLI {
private List<String> errorMsgs = Lists.newArrayList();
private List<String> htables;
- private HBaseAdmin hbaseAdmin;
+ private Admin hbaseAdmin;
private KylinConfig kylinConfig;
private String oldHostValue;
public UpdateHTableHostCLI(List<String> htables, String oldHostValue) throws IOException {
this.htables = htables;
this.oldHostValue = oldHostValue;
- this.hbaseAdmin = new HBaseAdmin(HBaseConnection.getCurrentHBaseConfiguration());
+ Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
+ hbaseAdmin = conn.getAdmin();
this.kylinConfig = KylinConfig.getInstanceFromEnv();
}
@@ -166,9 +169,9 @@ public class UpdateHTableHostCLI {
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
if (oldHostValue.equals(desc.getValue(IRealizationConstants.HTableTag))) {
desc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
- hbaseAdmin.disableTable(tableName);
- hbaseAdmin.modifyTable(tableName, desc);
- hbaseAdmin.enableTable(tableName);
+ hbaseAdmin.disableTable(TableName.valueOf(tableName));
+ hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+ hbaseAdmin.enableTable(TableName.valueOf(tableName));
updatedResources.add(tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
index f8e2644..fe06a31 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.metadata.datatype.LongMutable;
import org.apache.kylin.metadata.model.ColumnDesc;
@@ -230,15 +231,8 @@ public class AggregateRegionObserverTest {
return nextRaw(results);
}
- /*
- * (non-Javadoc)
- *
- * @see
- * org.apache.hadoop.hbase.regionserver.InternalScanner#next(java.util
- * .List, int)
- */
@Override
- public boolean next(List<Cell> result, int limit) throws IOException {
+ public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
return next(result);
}
@@ -307,6 +301,11 @@ public class AggregateRegionObserverTest {
return 0;
}
+ @Override
+ public int getBatch() {
+ return 0;
+ }
+
/*
* (non-Javadoc)
*
@@ -323,16 +322,9 @@ public class AggregateRegionObserverTest {
return i < input.size();
}
- /*
- * (non-Javadoc)
- *
- * @see
- * org.apache.hadoop.hbase.regionserver.RegionScanner#nextRaw(java.util
- * .List, int)
- */
@Override
- public boolean nextRaw(List<Cell> result, int limit) throws IOException {
- return nextRaw(result);
+ public boolean nextRaw(List<Cell> list, ScannerContext scannerContext) throws IOException {
+ return false;
}
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
index 1d85922..04e2e8b 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2EndToEnd.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
@@ -136,7 +137,7 @@ public class TestFuzzyRowFilterV2EndToEnd {
Put p = new Put(rk);
p.setDurability(Durability.SKIP_WAL);
- p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+ p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
ht.put(p);
}
}
@@ -224,7 +225,7 @@ public class TestFuzzyRowFilterV2EndToEnd {
scan.addFamily(cf.getBytes());
scan.setFilter(filter);
List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(table.getBytes());
- HRegion first = regions.get(0);
+ Region first = regions.get(0);
first.getScanner(scan);
RegionScanner scanner = first.getScanner(scan);
List<Cell> results = new ArrayList<Cell>();
[10/10] kylin git commit: KYLIN-1528 Create a branch for v1.5 with
HBase 1.x API
Posted by li...@apache.org.
KYLIN-1528 Create a branch for v1.5 with HBase 1.x API
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/b70684e6
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/b70684e6
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/b70684e6
Branch: refs/heads/1.5.x-HBase1.x
Commit: b70684e66d31b6fcfe6ca7b55b08a20768868c61
Parents: eb92f96
Author: shaofengshi <sh...@apache.org>
Authored: Wed Mar 23 17:07:05 2016 +0800
Committer: lidongsjtu <li...@apache.org>
Committed: Wed Sep 7 11:41:09 2016 +0800
----------------------------------------------------------------------
examples/test_case_data/sandbox/hbase-site.xml | 19 +---
.../test_case_data/sandbox/kylin_job_conf.xml | 86 +++++++++---------
examples/test_case_data/sandbox/mapred-site.xml | 23 +++--
.../kylin/provision/BuildCubeWithEngine.java | 13 ++-
pom.xml | 12 +--
.../kylin/rest/security/AclHBaseStorage.java | 4 +-
.../rest/security/MockAclHBaseStorage.java | 8 +-
.../apache/kylin/rest/security/MockHTable.java | 95 ++++----------------
.../rest/security/RealAclHBaseStorage.java | 9 +-
.../apache/kylin/rest/service/AclService.java | 25 +++---
.../apache/kylin/rest/service/CubeService.java | 36 +++-----
.../apache/kylin/rest/service/QueryService.java | 24 +++--
.../apache/kylin/rest/service/UserService.java | 17 ++--
.../kylin/storage/hbase/HBaseConnection.java | 44 ++++-----
.../kylin/storage/hbase/HBaseResourceStore.java | 31 +++----
.../kylin/storage/hbase/HBaseStorage.java | 3 +-
.../storage/hbase/cube/SimpleHBaseStore.java | 20 ++---
.../hbase/cube/v1/CubeSegmentTupleIterator.java | 11 +--
.../storage/hbase/cube/v1/CubeStorageQuery.java | 6 +-
.../hbase/cube/v1/RegionScannerAdapter.java | 10 ++-
.../cube/v1/SerializedHBaseTupleIterator.java | 4 +-
.../observer/AggregateRegionObserver.java | 4 +-
.../observer/AggregationScanner.java | 14 ++-
.../observer/ObserverAggregationCache.java | 10 ++-
.../coprocessor/observer/ObserverEnabler.java | 4 +-
.../hbase/cube/v2/CubeHBaseEndpointRPC.java | 13 +--
.../storage/hbase/cube/v2/CubeHBaseScanRPC.java | 9 +-
.../hbase/cube/v2/ExpectedSizeIterator.java | 10 +--
.../coprocessor/endpoint/CubeVisitService.java | 4 +-
.../storage/hbase/steps/CubeHTableUtil.java | 16 ++--
.../storage/hbase/steps/DeprecatedGCStep.java | 23 ++---
.../storage/hbase/steps/HBaseCuboidWriter.java | 7 +-
.../hbase/steps/HBaseStreamingOutput.java | 9 +-
.../kylin/storage/hbase/steps/MergeGCStep.java | 23 ++---
.../storage/hbase/util/CleanHtableCLI.java | 12 +--
.../storage/hbase/util/CubeMigrationCLI.java | 36 ++++----
.../hbase/util/CubeMigrationCheckCLI.java | 17 ++--
.../hbase/util/DeployCoprocessorCLI.java | 22 ++---
.../hbase/util/ExtendCubeToHybridCLI.java | 8 +-
.../hbase/util/GridTableHBaseBenchmark.java | 34 +++----
.../kylin/storage/hbase/util/HBaseClean.java | 18 ++--
.../hbase/util/HBaseRegionSizeCalculator.java | 35 ++++----
.../kylin/storage/hbase/util/HBaseUsage.java | 9 +-
.../storage/hbase/util/HbaseStreamingInput.java | 30 +++----
.../hbase/util/HtableAlterMetadataCLI.java | 9 +-
.../storage/hbase/util/OrphanHBaseCleanJob.java | 19 ++--
.../kylin/storage/hbase/util/PingHBaseCLI.java | 15 ++--
.../kylin/storage/hbase/util/RowCounterCLI.java | 11 +--
.../storage/hbase/util/StorageCleanupJob.java | 20 +++--
.../storage/hbase/util/UpdateHTableHostCLI.java | 17 ++--
.../observer/AggregateRegionObserverTest.java | 26 ++----
.../v1/filter/TestFuzzyRowFilterV2EndToEnd.java | 5 +-
52 files changed, 464 insertions(+), 525 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/examples/test_case_data/sandbox/hbase-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/hbase-site.xml b/examples/test_case_data/sandbox/hbase-site.xml
index 46d5345..734908e 100644
--- a/examples/test_case_data/sandbox/hbase-site.xml
+++ b/examples/test_case_data/sandbox/hbase-site.xml
@@ -190,22 +190,5 @@
<name>zookeeper.znode.parent</name>
<value>/hbase-unsecure</value>
</property>
- <property>
- <name>hbase.client.pause</name>
- <value>100</value>
- <description>General client pause value. Used mostly as value to wait
- before running a retry of a failed get, region lookup, etc.
- See hbase.client.retries.number for description of how we backoff from
- this initial pause amount and how this pause works w/ retries.</description>
- </property>
- <property>
- <name>hbase.client.retries.number</name>
- <value>5</value>
- <description>Maximum retries. Used as maximum for all retryable
- operations such as the getting of a cell's value, starting a row update,
- etc. Retry interval is a rough function based on hbase.client.pause. At
- first we retry at this interval but then with backoff, we pretty quickly reach
- retrying every ten seconds. See HConstants#RETRY_BACKOFF for how the backup
- ramps up. Change this setting and hbase.client.pause to suit your workload.</description>
- </property>
+
</configuration>
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/examples/test_case_data/sandbox/kylin_job_conf.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin_job_conf.xml b/examples/test_case_data/sandbox/kylin_job_conf.xml
index bd947af..6082fa9 100644
--- a/examples/test_case_data/sandbox/kylin_job_conf.xml
+++ b/examples/test_case_data/sandbox/kylin_job_conf.xml
@@ -1,20 +1,18 @@
<?xml version="1.0"?>
<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
+http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. See accompanying LICENSE file.
-->
+
<configuration>
<property>
@@ -26,44 +24,41 @@
</description>
</property>
- <property>
- <name>mapreduce.map.maxattempts</name>
- <value>2</value>
- </property>
+ <!-- uncomment the following 5 properties to enable lzo compressing
- <!--
- <property>
- <name>mapred.compress.map.output</name>
- <value>true</value>
- <description>Compress map outputs</description>
- </property>
+ <property>
+ <name>mapred.compress.map.output</name>
+ <value>true</value>
+ <description>Compress map outputs</description>
+ </property>
- <property>
- <name>mapred.map.output.compression.codec</name>
- <value>org.apache.hadoop.io.compress.SnappyCodec</value>
- <description>The compression codec to use for map outputs
- </description>
- </property>
+ <property>
+ <name>mapred.map.output.compression.codec</name>
+ <value>com.hadoop.compression.lzo.LzoCodec</value>
+ <description>The compression codec to use for map outputs
+ </description>
+ </property>
- <property>
- <name>mapred.output.compress</name>
- <value>true</value>
- <description>Compress the output of a MapReduce job</description>
- </property>
+ <property>
+ <name>mapred.output.compress</name>
+ <value>true</value>
+ <description>Compress the output of a MapReduce job</description>
+ </property>
- <property>
- <name>mapred.output.compression.codec</name>
- <value>org.apache.hadoop.io.compress.SnappyCodec</value>
- <description>The compression codec to use for job outputs
- </description>
- </property>
+ <property>
+ <name>mapred.output.compression.codec</name>
+ <value>com.hadoop.compression.lzo.LzoCodec</value>
+ <description>The compression codec to use for job outputs
+ </description>
+ </property>
- <property>
- <name>mapred.output.compression.type</name>
- <value>BLOCK</value>
- <description>The compression type to use for job outputs</description>
- </property>
--->
+ <property>
+ <name>mapred.output.compression.type</name>
+ <value>BLOCK</value>
+ <description>The compression type to use for job outputs</description>
+ </property>
+
+ !-->
<property>
<name>mapreduce.job.max.split.locations</name>
@@ -76,5 +71,4 @@
<value>2</value>
<description>Block replication</description>
</property>
-
</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/examples/test_case_data/sandbox/mapred-site.xml
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/mapred-site.xml b/examples/test_case_data/sandbox/mapred-site.xml
index 18f6feb..ff1c7eb 100644
--- a/examples/test_case_data/sandbox/mapred-site.xml
+++ b/examples/test_case_data/sandbox/mapred-site.xml
@@ -18,7 +18,7 @@
<property>
<name>io.sort.mb</name>
- <value>128</value>
+ <value>64</value>
</property>
<property>
@@ -28,12 +28,12 @@
<property>
<name>mapred.job.map.memory.mb</name>
- <value>512</value>
+ <value>250</value>
</property>
<property>
<name>mapred.job.reduce.memory.mb</name>
- <value>512</value>
+ <value>250</value>
</property>
<property>
@@ -58,7 +58,7 @@
<property>
<name>mapreduce.application.classpath</name>
- <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/usr/hdp/${hdp.version}/hadoop/lib/snappy-java-1.0.4.1.jar:/etc/hadoop/conf/secure</value>
+ <value>/tmp/kylin/*,$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hbase/lib/hbase-common.jar,/usr/hdp/current/hive-client/conf/,$PWD/mr-framework/hadoop/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop/share/hadoop/common/*:$PWD/mr-framework/hadoop/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/*:$PWD/mr-framework/hadoop/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure</value>
</property>
<property>
@@ -81,10 +81,9 @@
<value>false</value>
</property>
- <!--the default value on hdp is 0.05, however for test environments we need to be conservative on resource -->
<property>
<name>mapreduce.job.reduce.slowstart.completedmaps</name>
- <value>1</value>
+ <value>0.05</value>
</property>
<property>
@@ -114,7 +113,7 @@
<property>
<name>mapreduce.map.java.opts</name>
- <value>-Xmx512m</value>
+ <value>-Xmx200m</value>
</property>
<property>
@@ -124,7 +123,7 @@
<property>
<name>mapreduce.map.memory.mb</name>
- <value>512</value>
+ <value>250</value>
</property>
<property>
@@ -169,7 +168,7 @@
<property>
<name>mapreduce.reduce.memory.mb</name>
- <value>512</value>
+ <value>250</value>
</property>
<property>
@@ -219,7 +218,7 @@
<property>
<name>mapreduce.task.io.sort.mb</name>
- <value>128</value>
+ <value>64</value>
</property>
<property>
@@ -234,7 +233,7 @@
<property>
<name>yarn.app.mapreduce.am.command-opts</name>
- <value>-Xmx512m</value>
+ <value>-Xmx200m</value>
</property>
<property>
@@ -244,7 +243,7 @@
<property>
<name>yarn.app.mapreduce.am.resource.mb</name>
- <value>512</value>
+ <value>250</value>
</property>
<property>
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 3d60a3c..0910df5 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -35,8 +35,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -55,6 +54,7 @@ import org.apache.kylin.job.execution.DefaultChainedExecutable;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
import org.apache.kylin.job.manager.ExecutableManager;
+import org.apache.kylin.storage.hbase.HBaseConnection;
import org.apache.kylin.storage.hbase.util.HBaseRegionSizeCalculator;
import org.apache.kylin.storage.hbase.util.StorageCleanupJob;
import org.apache.kylin.storage.hbase.util.ZookeeperJobLock;
@@ -419,10 +419,10 @@ public class BuildCubeWithEngine {
}
private void checkHFilesInHBase(CubeSegment segment) throws IOException {
- Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
- String tableName = segment.getStorageLocationIdentifier();
- try (HTable table = new HTable(conf, tableName)) {
- HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
+ try (Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl())) {
+ String tableName = segment.getStorageLocationIdentifier();
+
+ HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
long totalSize = 0;
for (Long size : sizeMap.values()) {
@@ -448,5 +448,4 @@ public class BuildCubeWithEngine {
}
}
}
-
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d5925b9..93b6b46 100644
--- a/pom.xml
+++ b/pom.xml
@@ -46,20 +46,20 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<!-- Hadoop versions -->
- <hadoop2.version>2.6.0</hadoop2.version>
- <yarn.version>2.6.0</yarn.version>
+ <hadoop2.version>2.7.1</hadoop2.version>
+ <yarn.version>2.7.1</yarn.version>
<!-- Hive versions -->
- <hive.version>0.14.0</hive.version>
- <hive-hcatalog.version>0.14.0</hive-hcatalog.version>
+ <hive.version>1.2.1</hive.version>
+ <hive-hcatalog.version>1.2.1</hive-hcatalog.version>
<!-- HBase versions -->
- <hbase-hadoop2.version>0.98.8-hadoop2</hbase-hadoop2.version>
+ <hbase-hadoop2.version>1.1.1</hbase-hadoop2.version>
<kafka.version>0.8.1</kafka.version>
<!-- Hadoop deps, keep compatible with hadoop2.version -->
<zookeeper.version>3.4.6</zookeeper.version>
- <curator.version>2.6.0</curator.version>
+ <curator.version>2.7.1</curator.version>
<jackson.version>2.2.4</jackson.version>
<jsr305.version>3.0.1</jsr305.version>
<guava.version>14.0</guava.version>
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
index 38f299e..bfb5fe4 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/AclHBaseStorage.java
@@ -20,7 +20,7 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
/**
*/
@@ -37,6 +37,6 @@ public interface AclHBaseStorage {
String prepareHBaseTable(Class<?> clazz) throws IOException;
- HTableInterface getTable(String tableName) throws IOException;
+ Table getTable(String tableName) throws IOException;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
index d9326f5..cc76b87 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockAclHBaseStorage.java
@@ -21,7 +21,7 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.rest.service.AclService;
import org.apache.kylin.rest.service.QueryService;
@@ -34,8 +34,8 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
private static final String aclTableName = "MOCK-ACL-TABLE";
private static final String userTableName = "MOCK-USER-TABLE";
- private HTableInterface mockedAclTable;
- private HTableInterface mockedUserTable;
+ private Table mockedAclTable;
+ private Table mockedUserTable;
private RealAclHBaseStorage realAcl;
public MockAclHBaseStorage() {
@@ -65,7 +65,7 @@ public class MockAclHBaseStorage implements AclHBaseStorage {
}
@Override
- public HTableInterface getTable(String tableName) throws IOException {
+ public Table getTable(String tableName) throws IOException {
if (realAcl != null) {
return realAcl.getTable(tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
index d0aa0ed..972eea9 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/MockHTable.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
@@ -91,7 +91,7 @@ import com.google.protobuf.ServiceException;
* <li>remove some methods for loading data, checking values ...</li>
* </ul>
*/
-public class MockHTable implements HTableInterface {
+public class MockHTable implements Table {
private final String tableName;
private final List<String> columnFamilies = new ArrayList<>();
@@ -114,14 +114,6 @@ public class MockHTable implements HTableInterface {
this.columnFamilies.add(columnFamily);
}
- /**
- * {@inheritDoc}
- */
- @Override
- public byte[] getTableName() {
- return tableName.getBytes();
- }
-
@Override
public TableName getName() {
return null;
@@ -200,8 +192,8 @@ public class MockHTable implements HTableInterface {
}
@Override
- public Boolean[] exists(List<Get> gets) throws IOException {
- return new Boolean[0];
+ public boolean[] existsAll(List<Get> list) throws IOException {
+ return new boolean[0];
}
/**
@@ -306,15 +298,6 @@ public class MockHTable implements HTableInterface {
* {@inheritDoc}
*/
@Override
- public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
- // FIXME: implement
- return null;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
public ResultScanner getScanner(Scan scan) throws IOException {
final List<Result> ret = new ArrayList<Result>();
byte[] st = scan.getStartRow();
@@ -446,7 +429,7 @@ public class MockHTable implements HTableInterface {
*/
}
if (filter.hasFilterRow() && !filteredOnRowKey) {
- filter.filterRow(nkvs);
+ filter.filterRow();
}
if (filter.filterRow() || filteredOnRowKey) {
nkvs.clear();
@@ -535,6 +518,11 @@ public class MockHTable implements HTableInterface {
return false;
}
+ @Override
+ public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Put put) throws IOException {
+ return false;
+ }
+
/**
* {@inheritDoc}
*/
@@ -555,7 +543,7 @@ public class MockHTable implements HTableInterface {
continue;
}
for (KeyValue kv : delete.getFamilyMap().get(family)) {
- if (kv.isDeleteFamily()) {
+ if (kv.isDelete()) {
data.get(row).get(kv.getFamily()).clear();
} else {
data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
@@ -592,6 +580,11 @@ public class MockHTable implements HTableInterface {
return false;
}
+ @Override
+ public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, CompareFilter.CompareOp compareOp, byte[] bytes3, Delete delete) throws IOException {
+ return false;
+ }
+
/**
* {@inheritDoc}
*/
@@ -605,7 +598,7 @@ public class MockHTable implements HTableInterface {
*/
@Override
public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException {
- return incrementColumnValue(row, family, qualifier, amount, true);
+ return incrementColumnValue(row, family, qualifier, amount, null);
}
@Override
@@ -617,37 +610,6 @@ public class MockHTable implements HTableInterface {
* {@inheritDoc}
*/
@Override
- public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
- if (check(row, family, qualifier, null)) {
- Put put = new Put(row);
- put.add(family, qualifier, Bytes.toBytes(amount));
- put(put);
- return amount;
- }
- long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue()) + amount;
- data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(), Bytes.toBytes(newValue));
- return newValue;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public boolean isAutoFlush() {
- return true;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void flushCommits() throws IOException {
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
public void close() throws IOException {
}
@@ -673,29 +635,6 @@ public class MockHTable implements HTableInterface {
* {@inheritDoc}
*/
@Override
- public void setAutoFlush(boolean autoFlush) {
- throw new NotImplementedException();
-
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
- throw new NotImplementedException();
-
- }
-
- @Override
- public void setAutoFlushTo(boolean autoFlush) {
- throw new NotImplementedException();
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
public long getWriteBufferSize() {
throw new NotImplementedException();
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
index ab18029..d55edc3 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/RealAclHBaseStorage.java
@@ -21,7 +21,8 @@ package org.apache.kylin.rest.security;
import java.io.IOException;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.rest.service.AclService;
import org.apache.kylin.rest.service.QueryService;
@@ -57,11 +58,11 @@ public class RealAclHBaseStorage implements AclHBaseStorage {
}
@Override
- public HTableInterface getTable(String tableName) throws IOException {
+ public Table getTable(String tableName) throws IOException {
if (StringUtils.equals(tableName, aclTableName)) {
- return HBaseConnection.get(hbaseUrl).getTable(aclTableName);
+ return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(aclTableName));
} else if (StringUtils.equals(tableName, userTableName)) {
- return HBaseConnection.get(hbaseUrl).getTable(userTableName);
+ return HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
} else {
throw new IllegalStateException("getTable failed" + tableName);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
index d693a67..3e3efec 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AclService.java
@@ -33,7 +33,7 @@ import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -124,7 +124,7 @@ public class AclService implements MutableAclService {
@Override
public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
List<ObjectIdentity> oids = new ArrayList<ObjectIdentity>();
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -173,7 +173,7 @@ public class AclService implements MutableAclService {
@Override
public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids, List<Sid> sids) throws NotFoundException {
Map<ObjectIdentity, Acl> aclMaps = new HashMap<ObjectIdentity, Acl>();
- HTableInterface htable = null;
+ Table htable = null;
Result result = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -226,17 +226,16 @@ public class AclService implements MutableAclService {
Authentication auth = SecurityContextHolder.getContext().getAuthentication();
PrincipalSid sid = new PrincipalSid(auth);
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
Put put = new Put(Bytes.toBytes(String.valueOf(objectIdentity.getIdentifier())));
- put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
- put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
- put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_TYPE_COLUMN), Bytes.toBytes(objectIdentity.getType()));
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_OWNER_COLUMN), sidSerializer.serialize(new SidInfo(sid)));
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_ENTRY_INHERIT_COLUMN), Bytes.toBytes(true));
htable.put(put);
- htable.flushCommits();
logger.debug("ACL of " + objectIdentity + " created successfully.");
} catch (IOException e) {
@@ -250,7 +249,7 @@ public class AclService implements MutableAclService {
@Override
public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren) throws ChildrenExistException {
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -266,7 +265,6 @@ public class AclService implements MutableAclService {
}
htable.delete(delete);
- htable.flushCommits();
logger.debug("ACL of " + objectIdentity + " deleted successfully.");
} catch (IOException e) {
@@ -284,7 +282,7 @@ public class AclService implements MutableAclService {
throw e;
}
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(aclTableName);
@@ -295,17 +293,16 @@ public class AclService implements MutableAclService {
Put put = new Put(Bytes.toBytes(String.valueOf(acl.getObjectIdentity().getIdentifier())));
if (null != acl.getParentAcl()) {
- put.add(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_INFO_FAMILY), Bytes.toBytes(ACL_INFO_FAMILY_PARENT_COLUMN), domainObjSerializer.serialize(new DomainObjectInfo(acl.getParentAcl().getObjectIdentity())));
}
for (AccessControlEntry ace : acl.getEntries()) {
AceInfo aceInfo = new AceInfo(ace);
- put.add(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.ACL_ACES_FAMILY), Bytes.toBytes(aceInfo.getSidInfo().getSid()), aceSerializer.serialize(aceInfo));
}
if (!put.isEmpty()) {
htable.put(put);
- htable.flushCommits();
logger.debug("ACL of " + acl.getObjectIdentity() + " updated successfully.");
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index 4cd527c..945a111 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -28,8 +28,7 @@ import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.cube.CubeInstance;
@@ -424,35 +423,24 @@ public class CubeService extends BasicService {
if (htableInfoCache.containsKey(tableName)) {
return htableInfoCache.get(tableName);
}
-
- Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
- HTable table = null;
+ Connection conn = HBaseConnection.get(this.getConfig().getStorageUrl());
HBaseResponse hr = null;
long tableSize = 0;
int regionCount = 0;
- try {
- table = new HTable(hconf, tableName);
-
- HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(table);
- Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
-
- for (long s : sizeMap.values()) {
- tableSize += s;
- }
-
- regionCount = sizeMap.size();
+ HBaseRegionSizeCalculator cal = new HBaseRegionSizeCalculator(tableName, conn);
+ Map<byte[], Long> sizeMap = cal.getRegionSizeMap();
- // Set response.
- hr = new HBaseResponse();
- hr.setTableSize(tableSize);
- hr.setRegionCount(regionCount);
- } finally {
- if (null != table) {
- table.close();
- }
+ for (long s : sizeMap.values()) {
+ tableSize += s;
}
+ regionCount = sizeMap.size();
+
+ // Set response.
+ hr = new HBaseResponse();
+ hr.setTableSize(tableSize);
+ hr.setRegionCount(regionCount);
htableInfoCache.put(tableName, hr);
return hr;
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index df296cf..340f142 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -45,11 +45,11 @@ import javax.sql.DataSource;
import org.apache.calcite.avatica.ColumnMetaData.Rep;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.cube.CubeInstance;
@@ -137,14 +137,13 @@ public class QueryService extends BasicService {
Query[] queryArray = new Query[queries.size()];
byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
- HTableInterface htable = null;
+ Table htable = null;
try {
- htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
+ htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
Put put = new Put(Bytes.toBytes(creator));
- put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+ put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
htable.put(put);
- htable.flushCommits();
} finally {
IOUtils.closeQuietly(htable);
}
@@ -170,14 +169,13 @@ public class QueryService extends BasicService {
Query[] queryArray = new Query[queries.size()];
byte[] bytes = querySerializer.serialize(queries.toArray(queryArray));
- HTableInterface htable = null;
+ Table htable = null;
try {
- htable = HBaseConnection.get(hbaseUrl).getTable(userTableName);
+ htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
Put put = new Put(Bytes.toBytes(creator));
- put.add(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
+ put.addColumn(Bytes.toBytes(USER_QUERY_FAMILY), Bytes.toBytes(USER_QUERY_COLUMN), bytes);
htable.put(put);
- htable.flushCommits();
} finally {
IOUtils.closeQuietly(htable);
}
@@ -189,12 +187,12 @@ public class QueryService extends BasicService {
}
List<Query> queries = new ArrayList<Query>();
- HTableInterface htable = null;
+ Table htable = null;
try {
- HConnection conn = HBaseConnection.get(hbaseUrl);
+ org.apache.hadoop.hbase.client.Connection conn = HBaseConnection.get(hbaseUrl);
HBaseConnection.createHTableIfNeeded(conn, userTableName, USER_QUERY_FAMILY);
- htable = conn.getTable(userTableName);
+ htable = HBaseConnection.get(hbaseUrl).getTable(TableName.valueOf(userTableName));
Get get = new Get(Bytes.toBytes(creator));
get.addFamily(Bytes.toBytes(USER_QUERY_FAMILY));
Result result = htable.get(get);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
index 07c7c6f..ab54882 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/UserService.java
@@ -30,11 +30,11 @@ import javax.annotation.PostConstruct;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.rest.security.AclHBaseStorage;
@@ -72,7 +72,7 @@ public class UserService implements UserDetailsManager {
@Override
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
@@ -144,16 +144,16 @@ public class UserService implements UserDetailsManager {
@Override
public void updateUser(UserDetails user) {
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
Pair<byte[], byte[]> pair = userToHBaseRow(user);
Put put = new Put(pair.getKey());
- put.add(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
+
+ put.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN), pair.getSecond());
htable.put(put);
- htable.flushCommits();
} catch (IOException e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
@@ -163,14 +163,13 @@ public class UserService implements UserDetailsManager {
@Override
public void deleteUser(String username) {
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
Delete delete = new Delete(Bytes.toBytes(username));
htable.delete(delete);
- htable.flushCommits();
} catch (IOException e) {
throw new RuntimeException(e.getMessage(), e);
} finally {
@@ -185,7 +184,7 @@ public class UserService implements UserDetailsManager {
@Override
public boolean userExists(String username) {
- HTableInterface htable = null;
+ Table htable = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
@@ -216,7 +215,7 @@ public class UserService implements UserDetailsManager {
s.addColumn(Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_FAMILY), Bytes.toBytes(AclHBaseStorage.USER_AUTHORITY_COLUMN));
List<UserDetails> all = new ArrayList<UserDetails>();
- HTableInterface htable = null;
+ Table htable = null;
ResultScanner scanner = null;
try {
htable = aclHBaseStorage.getTable(userTableName);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
index cbf81b6..b769391 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseConnection.java
@@ -40,9 +40,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.kylin.common.KylinConfig;
@@ -64,7 +64,7 @@ public class HBaseConnection {
private static final Logger logger = LoggerFactory.getLogger(HBaseConnection.class);
private static final Map<String, Configuration> configCache = new ConcurrentHashMap<String, Configuration>();
- private static final Map<String, HConnection> connPool = new ConcurrentHashMap<String, HConnection>();
+ private static final Map<String, Connection> connPool = new ConcurrentHashMap<String, Connection>();
private static final ThreadLocal<Configuration> configThreadLocal = new ThreadLocal<>();
private static ExecutorService coprocessorPool = null;
@@ -75,7 +75,7 @@ public class HBaseConnection {
public void run() {
closeCoprocessorPool();
- for (HConnection conn : connPool.values()) {
+ for (Connection conn : connPool.values()) {
try {
conn.close();
} catch (IOException e) {
@@ -144,7 +144,7 @@ public class HBaseConnection {
// using a hbase:xxx URL is deprecated; instead, hbase config is always loaded from hbase-site.xml on the classpath
if (!(StringUtils.isEmpty(url) || "hbase".equals(url)))
throw new IllegalArgumentException("to use hbase storage, pls set 'kylin.storage.url=hbase' in kylin.properties");
-
+
Configuration conf = HBaseConfiguration.create(HadoopUtil.getCurrentConfiguration());
addHBaseClusterNNHAConfiguration(conf);
@@ -213,9 +213,9 @@ public class HBaseConnection {
// ============================================================================
- // returned HConnection can be shared by multiple threads and does not require close()
+ // returned Connection can be shared by multiple threads and does not require close()
@SuppressWarnings("resource")
- public static HConnection get(String url) {
+ public static Connection get(String url) {
// find configuration
Configuration conf = configCache.get(url);
if (conf == null) {
@@ -223,13 +223,13 @@ public class HBaseConnection {
configCache.put(url, conf);
}
- HConnection connection = connPool.get(url);
+ Connection connection = connPool.get(url);
try {
while (true) {
// I don't use DCL since recreating a connection is not a big issue.
if (connection == null || connection.isClosed()) {
logger.info("connection is null or closed, creating a new one");
- connection = HConnectionManager.createConnection(conf);
+ connection = ConnectionFactory.createConnection(conf);
connPool.put(url, connection);
}
@@ -248,8 +248,8 @@ public class HBaseConnection {
return connection;
}
- public static boolean tableExists(HConnection conn, String tableName) throws IOException {
- HBaseAdmin hbase = new HBaseAdmin(conn);
+ public static boolean tableExists(Connection conn, String tableName) throws IOException {
+ Admin hbase = conn.getAdmin();
try {
return hbase.tableExists(TableName.valueOf(tableName));
} finally {
@@ -269,18 +269,18 @@ public class HBaseConnection {
deleteTable(HBaseConnection.get(hbaseUrl), tableName);
}
- public static void createHTableIfNeeded(HConnection conn, String table, String... families) throws IOException {
- HBaseAdmin hbase = new HBaseAdmin(conn);
-
+ public static void createHTableIfNeeded(Connection conn, String table, String... families) throws IOException {
+ Admin hbase = conn.getAdmin();
+ TableName tableName = TableName.valueOf(table);
try {
if (tableExists(conn, table)) {
logger.debug("HTable '" + table + "' already exists");
- Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(TableName.valueOf(table)));
+ Set<String> existingFamilies = getFamilyNames(hbase.getTableDescriptor(tableName));
boolean wait = false;
for (String family : families) {
if (existingFamilies.contains(family) == false) {
logger.debug("Adding family '" + family + "' to HTable '" + table + "'");
- hbase.addColumn(table, newFamilyDescriptor(family));
+ hbase.addColumn(tableName, newFamilyDescriptor(family));
// addColumn() is async; is there a way to wait for it to finish?
wait = true;
}
@@ -333,8 +333,8 @@ public class HBaseConnection {
return fd;
}
- public static void deleteTable(HConnection conn, String tableName) throws IOException {
- HBaseAdmin hbase = new HBaseAdmin(conn);
+ public static void deleteTable(Connection conn, String tableName) throws IOException {
+ Admin hbase = conn.getAdmin();
try {
if (!tableExists(conn, tableName)) {
@@ -344,10 +344,10 @@ public class HBaseConnection {
logger.debug("delete HTable '" + tableName + "'");
- if (hbase.isTableEnabled(tableName)) {
- hbase.disableTable(tableName);
+ if (hbase.isTableEnabled(TableName.valueOf(tableName))) {
+ hbase.disableTable(TableName.valueOf(tableName));
}
- hbase.deleteTable(tableName);
+ hbase.deleteTable(TableName.valueOf(tableName));
logger.debug("HTable '" + tableName + "' deleted");
} finally {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
index e2f3661..50524b6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseResourceStore.java
@@ -31,14 +31,15 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
@@ -69,7 +70,7 @@ public class HBaseResourceStore extends ResourceStore {
final String tableNameBase;
final String hbaseUrl;
- private HConnection getConnection() throws IOException {
+ private Connection getConnection() throws IOException {
return HBaseConnection.get(hbaseUrl);
}
@@ -120,7 +121,7 @@ public class HBaseResourceStore extends ResourceStore {
byte[] endRow = Bytes.toBytes(lookForPrefix);
endRow[endRow.length - 1]++;
- HTableInterface table = getConnection().getTable(getAllInOneTableName());
+ Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
Scan scan = new Scan(startRow, endRow);
if ((filter != null && filter instanceof KeyOnlyFilter) == false) {
scan.addColumn(B_FAMILY, B_COLUMN_TS);
@@ -238,13 +239,12 @@ public class HBaseResourceStore extends ResourceStore {
IOUtils.copy(content, bout);
bout.close();
- HTableInterface table = getConnection().getTable(getAllInOneTableName());
+ Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
try {
byte[] row = Bytes.toBytes(resPath);
Put put = buildPut(resPath, ts, row, bout.toByteArray(), table);
table.put(put);
- table.flushCommits();
} finally {
IOUtils.closeQuietly(table);
}
@@ -252,7 +252,7 @@ public class HBaseResourceStore extends ResourceStore {
@Override
protected long checkAndPutResourceImpl(String resPath, byte[] content, long oldTS, long newTS) throws IOException, IllegalStateException {
- HTableInterface table = getConnection().getTable(getAllInOneTableName());
+ Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
try {
byte[] row = Bytes.toBytes(resPath);
byte[] bOldTS = oldTS == 0 ? null : Bytes.toBytes(oldTS);
@@ -265,8 +265,6 @@ public class HBaseResourceStore extends ResourceStore {
throw new IllegalStateException("Overwriting conflict " + resPath + ", expect old TS " + oldTS + ", but it is " + real);
}
- table.flushCommits();
-
return newTS;
} finally {
IOUtils.closeQuietly(table);
@@ -275,7 +273,7 @@ public class HBaseResourceStore extends ResourceStore {
@Override
protected void deleteResourceImpl(String resPath) throws IOException {
- HTableInterface table = getConnection().getTable(getAllInOneTableName());
+ Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
try {
boolean hdfsResourceExist = false;
Result result = internalGetFromHTable(table, resPath, true, false);
@@ -288,7 +286,6 @@ public class HBaseResourceStore extends ResourceStore {
Delete del = new Delete(Bytes.toBytes(resPath));
table.delete(del);
- table.flushCommits();
if (hdfsResourceExist) { // remove hdfs cell value
Path redirectPath = bigCellHDFSPath(resPath);
@@ -310,7 +307,7 @@ public class HBaseResourceStore extends ResourceStore {
}
private Result getFromHTable(String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
- HTableInterface table = getConnection().getTable(getAllInOneTableName());
+ Table table = getConnection().getTable(TableName.valueOf(getAllInOneTableName()));
try {
return internalGetFromHTable(table, path, fetchContent, fetchTimestamp);
} finally {
@@ -318,7 +315,7 @@ public class HBaseResourceStore extends ResourceStore {
}
}
- private Result internalGetFromHTable(HTableInterface table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
+ private Result internalGetFromHTable(Table table, String path, boolean fetchContent, boolean fetchTimestamp) throws IOException {
byte[] rowkey = Bytes.toBytes(path);
Get get = new Get(rowkey);
@@ -337,7 +334,7 @@ public class HBaseResourceStore extends ResourceStore {
return exists ? result : null;
}
- private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, HTableInterface table) throws IOException {
+ private Path writeLargeCellToHdfs(String resPath, byte[] largeColumn, Table table) throws IOException {
Path redirectPath = bigCellHDFSPath(resPath);
Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
FileSystem fileSystem = FileSystem.get(hconf);
@@ -363,7 +360,7 @@ public class HBaseResourceStore extends ResourceStore {
return redirectPath;
}
- private Put buildPut(String resPath, long ts, byte[] row, byte[] content, HTableInterface table) throws IOException {
+ private Put buildPut(String resPath, long ts, byte[] row, byte[] content, Table table) throws IOException {
int kvSizeLimit = this.kylinConfig.getHBaseKeyValueSize();
if (content.length > kvSizeLimit) {
writeLargeCellToHdfs(resPath, content, table);
@@ -371,8 +368,8 @@ public class HBaseResourceStore extends ResourceStore {
}
Put put = new Put(row);
- put.add(B_FAMILY, B_COLUMN, content);
- put.add(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
+ put.addColumn(B_FAMILY, B_COLUMN, content);
+ put.addColumn(B_FAMILY, B_COLUMN_TS, Bytes.toBytes(ts));
return put;
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
index f4dfd2b..3d82105 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/HBaseStorage.java
@@ -18,7 +18,6 @@
package org.apache.kylin.storage.hbase;
-import com.google.common.base.Preconditions;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.debug.BackdoorToggles;
import org.apache.kylin.cube.CubeInstance;
@@ -36,6 +35,8 @@ import org.apache.kylin.storage.IStorageQuery;
import org.apache.kylin.storage.hbase.steps.HBaseMROutput;
import org.apache.kylin.storage.hbase.steps.HBaseMROutput2Transition;
+import com.google.common.base.Preconditions;
+
@SuppressWarnings("unused")
//used by reflection
public class HBaseStorage implements IStorage {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
index b141190..f63d9c2 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/SimpleHBaseStore.java
@@ -26,12 +26,13 @@ import java.util.NoSuchElementException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.BufferedMutator;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.cube.kv.RowConstants;
@@ -86,14 +87,13 @@ public class SimpleHBaseStore implements IGTStore {
}
private class Writer implements IGTWriter {
- final HTableInterface table;
+ final BufferedMutator table;
final ByteBuffer rowkey = ByteBuffer.allocate(50);
final ByteBuffer value = ByteBuffer.allocate(50);
Writer() throws IOException {
- HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
- table = conn.getTable(htableName);
- table.setAutoFlush(false, true);
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ table = conn.getBufferedMutator(htableName);
}
@Override
@@ -113,24 +113,24 @@ public class SimpleHBaseStore implements IGTStore {
Put put = new Put(rowkey);
put.addImmutable(CF_B, ByteBuffer.wrap(COL_B), HConstants.LATEST_TIMESTAMP, value);
- table.put(put);
+ table.mutate(put);
}
@Override
public void close() throws IOException {
- table.flushCommits();
+ table.flush();
table.close();
}
}
class Reader implements IGTScanner {
- final HTableInterface table;
+ final Table table;
final ResultScanner scanner;
int count = 0;
Reader() throws IOException {
- HConnection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
+ Connection conn = HBaseConnection.get(KylinConfig.getInstanceFromEnv().getStorageUrl());
table = conn.getTable(htableName);
Scan scan = new Scan();
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
index 8ac3832..982a044 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeSegmentTupleIterator.java
@@ -25,11 +25,12 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
@@ -70,7 +71,7 @@ public class CubeSegmentTupleIterator implements ITupleIterator {
protected final List<RowValueDecoder> rowValueDecoders;
private final StorageContext context;
private final String tableName;
- private final HTableInterface table;
+ private final Table table;
protected CubeTupleConverter tupleConverter;
protected final Iterator<HBaseKeyRange> rangeIterator;
@@ -88,7 +89,7 @@ public class CubeSegmentTupleIterator implements ITupleIterator {
private int advMeasureRowsRemaining;
private int advMeasureRowIndex;
- public CubeSegmentTupleIterator(CubeSegment cubeSeg, List<HBaseKeyRange> keyRanges, HConnection conn, //
+ public CubeSegmentTupleIterator(CubeSegment cubeSeg, List<HBaseKeyRange> keyRanges, Connection conn, //
Set<TblColRef> dimensions, TupleFilter filter, Set<TblColRef> groupBy, //
List<RowValueDecoder> rowValueDecoders, StorageContext context, TupleInfo returnTupleInfo) {
this.cubeSeg = cubeSeg;
@@ -108,7 +109,7 @@ public class CubeSegmentTupleIterator implements ITupleIterator {
this.rangeIterator = keyRanges.iterator();
try {
- this.table = conn.getTable(tableName);
+ this.table = conn.getTable(TableName.valueOf(tableName));
} catch (Throwable t) {
throw new StorageException("Error when open connection to table " + tableName, t);
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
index ff729f4..1944327 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/CubeStorageQuery.java
@@ -33,7 +33,7 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
-import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.common.util.Dictionary;
@@ -46,10 +46,10 @@ import org.apache.kylin.cube.RawQueryLastHacker;
import org.apache.kylin.cube.cuboid.Cuboid;
import org.apache.kylin.cube.kv.RowConstants;
import org.apache.kylin.cube.model.CubeDesc;
+import org.apache.kylin.cube.model.CubeDesc.DeriveInfo;
import org.apache.kylin.cube.model.HBaseColumnDesc;
import org.apache.kylin.cube.model.HBaseMappingDesc;
import org.apache.kylin.cube.model.RowKeyDesc;
-import org.apache.kylin.cube.model.CubeDesc.DeriveInfo;
import org.apache.kylin.dict.lookup.LookupStringTable;
import org.apache.kylin.measure.MeasureType;
import org.apache.kylin.metadata.filter.ColumnTupleFilter;
@@ -152,7 +152,7 @@ public class CubeStorageQuery implements IStorageQuery {
setCoprocessor(groupsCopD, valueDecoders, context); // enable coprocessor if beneficial
setLimit(filter, context);
- HConnection conn = HBaseConnection.get(context.getConnUrl());
+ Connection conn = HBaseConnection.get(context.getConnUrl());
// notice we're passing filterD down to storage instead of flatFilter
return new SerializedHBaseTupleIterator(conn, scans, cubeInstance, dimensionsD, filterD, groupsCopD, valueDecoders, context, returnTupleInfo);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
index 6342c5c..9a171e9 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/RegionScannerAdapter.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
/**
* @author yangli9
@@ -50,7 +51,7 @@ public class RegionScannerAdapter implements RegionScanner {
}
@Override
- public boolean next(List<Cell> result, int limit) throws IOException {
+ public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
return next(result);
}
@@ -60,7 +61,7 @@ public class RegionScannerAdapter implements RegionScanner {
}
@Override
- public boolean nextRaw(List<Cell> result, int limit) throws IOException {
+ public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
return next(result);
}
@@ -94,4 +95,9 @@ public class RegionScannerAdapter implements RegionScanner {
return Long.MAX_VALUE;
}
+ @Override
+ public int getBatch() {
+ return -1;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
index e8dd5b9..d033c77 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/SerializedHBaseTupleIterator.java
@@ -25,7 +25,7 @@ import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
-import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.metadata.filter.TupleFilter;
@@ -57,7 +57,7 @@ public class SerializedHBaseTupleIterator implements ITupleIterator {
private int scanCount;
private ITuple next;
- public SerializedHBaseTupleIterator(HConnection conn, List<HBaseKeyRange> segmentKeyRanges, CubeInstance cube, //
+ public SerializedHBaseTupleIterator(Connection conn, List<HBaseKeyRange> segmentKeyRanges, CubeInstance cube, //
Set<TblColRef> dimensions, TupleFilter filter, Set<TblColRef> groupBy, List<RowValueDecoder> rowValueDecoders, //
StorageContext context, TupleInfo returnTupleInfo) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
index c7b650a..8dba1b1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserver.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
@@ -99,7 +99,7 @@ public class AggregateRegionObserver extends BaseRegionObserver {
// start/end region operation & sync on scanner is suggested by the
// javadoc of RegionScanner.nextRaw()
// FIXME: will the lock still work when a iterator is returned? is it safe? Is readonly attribute helping here? by mhb
- HRegion region = ctxt.getEnvironment().getRegion();
+ Region region = ctxt.getEnvironment().getRegion();
region.startRegionOperation();
try {
synchronized (innerScanner) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
index be26142..5857435 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregationScanner.java
@@ -25,6 +25,7 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.kylin.measure.MeasureAggregator;
import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
@@ -116,8 +117,8 @@ public class AggregationScanner implements RegionScanner {
}
@Override
- public boolean next(List<Cell> result, int limit) throws IOException {
- return outerScanner.next(result, limit);
+ public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
+ return outerScanner.next(result, scannerContext);
}
@Override
@@ -126,8 +127,8 @@ public class AggregationScanner implements RegionScanner {
}
@Override
- public boolean nextRaw(List<Cell> result, int limit) throws IOException {
- return outerScanner.nextRaw(result, limit);
+ public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
+ return outerScanner.nextRaw(result, scannerContext);
}
@Override
@@ -160,6 +161,11 @@ public class AggregationScanner implements RegionScanner {
return outerScanner.getMvccReadPoint();
}
+ @Override
+ public int getBatch() {
+ return outerScanner.getBatch();
+ }
+
private static class Stats {
long inputRows = 0;
long inputBytes = 0;
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
index 8404262..331e34d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverAggregationCache.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.kylin.measure.MeasureAggregator;
import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
import org.apache.kylin.storage.hbase.common.coprocessor.AggregationCache;
@@ -112,7 +113,7 @@ public class ObserverAggregationCache extends AggregationCache {
}
@Override
- public boolean next(List<Cell> result, int limit) throws IOException {
+ public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
return next(result);
}
@@ -122,7 +123,7 @@ public class ObserverAggregationCache extends AggregationCache {
}
@Override
- public boolean nextRaw(List<Cell> result, int limit) throws IOException {
+ public boolean nextRaw(List<Cell> result, ScannerContext scannerContext) throws IOException {
return next(result);
}
@@ -161,6 +162,11 @@ public class ObserverAggregationCache extends AggregationCache {
// AggregateRegionObserver.LOG.info("Kylin Scanner getMvccReadPoint()");
return Long.MAX_VALUE;
}
+
+ @Override
+ public int getBatch() {
+ return innerScanner.getBatch();
+ }
}
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
index f0e9bed..e02ba1c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/ObserverEnabler.java
@@ -23,9 +23,9 @@ import java.util.Collection;
import java.util.Map;
import java.util.Set;
-import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.debug.BackdoorToggles;
@@ -60,7 +60,7 @@ public class ObserverEnabler {
static final Map<String, Boolean> CUBE_OVERRIDES = Maps.newConcurrentMap();
public static ResultScanner scanWithCoprocessorIfBeneficial(CubeSegment segment, Cuboid cuboid, TupleFilter tupleFiler, //
- Collection<TblColRef> groupBy, Collection<RowValueDecoder> rowValueDecoders, StorageContext context, HTableInterface table, Scan scan) throws IOException {
+ Collection<TblColRef> groupBy, Collection<RowValueDecoder> rowValueDecoders, StorageContext context, Table table, Scan scan) throws IOException {
if (context.isCoprocessorEnabled() == false) {
return table.getScanner(scan);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
index 07a3cc3..c42cd05 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseEndpointRPC.java
@@ -26,8 +26,9 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.DataFormatException;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -50,10 +51,10 @@ import org.apache.kylin.storage.hbase.HBaseConnection;
import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorBehavior;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
-import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitRequest.IntList;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse;
import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitResponse.Stats;
+import org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.generated.CubeVisitProtos.CubeVisitService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -120,7 +121,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
// globally shared connection, does not require close
- final HConnection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+ final Connection conn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
final List<IntList> hbaseColumnsToGTIntList = Lists.newArrayList();
List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
@@ -198,7 +199,7 @@ public class CubeHBaseEndpointRPC extends CubeHBaseRPC {
final boolean[] abnormalFinish = new boolean[1];
try {
- HTableInterface table = conn.getTable(cubeSeg.getStorageLocationIdentifier(), HBaseConnection.getCoprocessorPool());
+ Table table = conn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()), HBaseConnection.getCoprocessorPool());
final CubeVisitRequest request = builder.build();
final byte[] startKey = epRange.getFirst();
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
index a359d19..a940dfc 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/CubeHBaseScanRPC.java
@@ -24,11 +24,12 @@ import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.common.util.ImmutableBitSet;
import org.apache.kylin.common.util.ShardingHash;
@@ -164,8 +165,8 @@ public class CubeHBaseScanRPC extends CubeHBaseRPC {
// primary key (also the 0th column block) is always selected
final ImmutableBitSet selectedColBlocks = scanRequest.getSelectedColBlocks().set(0);
// globally shared connection, does not require close
- HConnection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
- final HTableInterface hbaseTable = hbaseConn.getTable(cubeSeg.getStorageLocationIdentifier());
+ Connection hbaseConn = HBaseConnection.get(cubeSeg.getCubeInstance().getConfig().getStorageUrl());
+ final Table hbaseTable = hbaseConn.getTable(TableName.valueOf(cubeSeg.getStorageLocationIdentifier()));
List<RawScan> rawScans = preparedHBaseScans(scanRequest.getGTScanRanges(), selectedColBlocks);
List<List<Integer>> hbaseColumnsToGT = getHBaseColumnsGTMapping(selectedColBlocks);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
index 7d48c1a..be88c47 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/ExpectedSizeIterator.java
@@ -18,6 +18,11 @@
package org.apache.kylin.storage.hbase.cube.v2;
+import java.util.Iterator;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.TimeUnit;
+
import org.apache.commons.lang.NotImplementedException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
@@ -27,11 +32,6 @@ import org.apache.kylin.storage.hbase.HBaseConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Iterator;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.TimeUnit;
-
class ExpectedSizeIterator implements Iterator<byte[]> {
private static final Logger logger = LoggerFactory.getLogger(ExpectedSizeIterator.class);
http://git-wip-us.apache.org/repos/asf/kylin/blob/b70684e6/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index b29d0d1..2d449d4 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -145,7 +145,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
if (shardLength == 0) {
return;
}
- byte[] regionStartKey = ArrayUtils.isEmpty(region.getStartKey()) ? new byte[shardLength] : region.getStartKey();
+ byte[] regionStartKey = ArrayUtils.isEmpty(region.getRegionInfo().getStartKey()) ? new byte[shardLength] : region.getRegionInfo().getStartKey();
Bytes.putBytes(rawScan.startKey, 0, regionStartKey, 0, shardLength);
Bytes.putBytes(rawScan.endKey, 0, regionStartKey, 0, shardLength);
}
@@ -181,7 +181,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
try {
this.serviceStartTime = System.currentTimeMillis();
- region = env.getRegion();
+ region = (HRegion)env.getRegion();
region.startRegionOperation();
// if user change kylin.properties on kylin server, need to manually redeploy coprocessor jar to update KylinConfig of Env.
[05/10] kylin git commit: KYLIN-1965: bug fix
Posted by li...@apache.org.
KYLIN-1965: bug fix
Signed-off-by: Jason <ji...@163.com>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/08f7ddab
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/08f7ddab
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/08f7ddab
Branch: refs/heads/1.5.x-HBase1.x
Commit: 08f7ddab1c63db4bcd4d8d461a8a21da0586512d
Parents: 7bc420b
Author: kangkaisen <ka...@live.com>
Authored: Mon Sep 5 14:11:37 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Tue Sep 6 11:04:30 2016 +0800
----------------------------------------------------------------------
webapp/app/js/controllers/cubeMeasures.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/08f7ddab/webapp/app/js/controllers/cubeMeasures.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/controllers/cubeMeasures.js b/webapp/app/js/controllers/cubeMeasures.js
index 794d2b2..2f191f9 100644
--- a/webapp/app/js/controllers/cubeMeasures.js
+++ b/webapp/app/js/controllers/cubeMeasures.js
@@ -206,7 +206,7 @@ KylinApp.controller('CubeMeasuresCtrl', function ($scope, $modal,MetaModel,cubes
names.push(measures[i].name);
}
var index = names.indexOf(newMeasure.name);
- return index > -1;
+ return (index > -1 && index != $scope.updateMeasureStatus.editIndex);
}
$scope.nextParameterInit = function(){
[08/10] kylin git commit: KYLIN-1698 Also support int partition
column using format yyyyMMdd
Posted by li...@apache.org.
KYLIN-1698 Also support int partition column using format yyyyMMdd
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/eb92f96f
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/eb92f96f
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/eb92f96f
Branch: refs/heads/1.5.x-HBase1.x
Commit: eb92f96fc598a2add7a80a49768cd4f2d4184c76
Parents: ae72c25
Author: Li Yang <li...@apache.org>
Authored: Wed Sep 7 11:30:48 2016 +0800
Committer: Li Yang <li...@apache.org>
Committed: Wed Sep 7 11:30:48 2016 +0800
----------------------------------------------------------------------
.../kylin/metadata/model/PartitionDesc.java | 25 +++++++++++++++++---
1 file changed, 22 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/eb92f96f/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
----------------------------------------------------------------------
diff --git a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
index 598e6d0..6487bfa 100644
--- a/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
+++ b/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
@@ -91,6 +91,14 @@ public class PartitionDesc {
partitionConditionBuilder = (IPartitionConditionBuilder) ClassUtil.newInstance(partitionConditionBuilderClz);
}
+ public boolean partitionColumnIsYmdInt() {
+ if (partitionDateColumnRef == null)
+ return false;
+
+ DataType type = partitionDateColumnRef.getType();
+ return type.isInt();
+ }
+
public boolean partitionColumnIsTimeMillis() {
if (partitionDateColumnRef == null)
return false;
@@ -175,8 +183,10 @@ public class PartitionDesc {
String partitionDateColumnName = partDesc.getPartitionDateColumn();
String partitionTimeColumnName = partDesc.getPartitionTimeColumn();
- if (partDesc.partitionColumnIsTimeMillis()) {
- buildSingleColumnRangeCondition(builder, partitionDateColumnName, startInclusive, endExclusive, tableAlias);
+ if (partDesc.partitionColumnIsYmdInt()) {
+ buildSingleColumnRangeCondAsYmdInt(builder, partitionDateColumnName, startInclusive, endExclusive, tableAlias);
+ } else if (partDesc.partitionColumnIsTimeMillis()) {
+ buildSingleColumnRangeCondAsTimeMillis(builder, partitionDateColumnName, startInclusive, endExclusive, tableAlias);
} else if (partitionDateColumnName != null && partitionTimeColumnName == null) {
buildSingleColumnRangeCondition(builder, partitionDateColumnName, startInclusive, endExclusive, partDesc.getPartitionDateFormat(), tableAlias);
} else if (partitionDateColumnName == null && partitionTimeColumnName != null) {
@@ -201,7 +211,7 @@ public class PartitionDesc {
return columnName;
}
- private static void buildSingleColumnRangeCondition(StringBuilder builder, String partitionColumnName, long startInclusive, long endExclusive, Map<String, String> tableAlias) {
+ private static void buildSingleColumnRangeCondAsTimeMillis(StringBuilder builder, String partitionColumnName, long startInclusive, long endExclusive, Map<String, String> tableAlias) {
partitionColumnName = replaceColumnNameWithAlias(partitionColumnName, tableAlias);
if (startInclusive > 0) {
builder.append(partitionColumnName + " >= " + startInclusive);
@@ -210,6 +220,15 @@ public class PartitionDesc {
builder.append(partitionColumnName + " < " + endExclusive);
}
+ private static void buildSingleColumnRangeCondAsYmdInt(StringBuilder builder, String partitionColumnName, long startInclusive, long endExclusive, Map<String, String> tableAlias) {
+ partitionColumnName = replaceColumnNameWithAlias(partitionColumnName, tableAlias);
+ if (startInclusive > 0) {
+ builder.append(partitionColumnName + " >= " + DateFormat.formatToDateStr(startInclusive, DateFormat.COMPACT_DATE_PATTERN));
+ builder.append(" AND ");
+ }
+ builder.append(partitionColumnName + " < " + DateFormat.formatToDateStr(endExclusive, DateFormat.COMPACT_DATE_PATTERN));
+ }
+
private static void buildSingleColumnRangeCondition(StringBuilder builder, String partitionColumnName, long startInclusive, long endExclusive, String partitionColumnDateFormat, Map<String, String> tableAlias) {
partitionColumnName = replaceColumnNameWithAlias(partitionColumnName, tableAlias);
if (startInclusive > 0) {
[02/10] kylin git commit: KYLIN-1984: Disable compress in default
configuration
Posted by li...@apache.org.
KYLIN-1984: Disable compress in default configuration
Signed-off-by: Hongbin Ma <ma...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/99d0d753
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/99d0d753
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/99d0d753
Branch: refs/heads/1.5.x-HBase1.x
Commit: 99d0d7535ae307a3b34a8ffd23b5d2d50559eea5
Parents: 663820f
Author: Yiming Liu <li...@gmail.com>
Authored: Sun Sep 4 17:22:20 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Sun Sep 4 21:15:48 2016 +0800
----------------------------------------------------------------------
build/conf/kylin.properties | 4 ++--
build/conf/kylin_hive_conf.xml | 13 ++++++++++++-
build/conf/kylin_job_conf.xml | 15 ++++++++++++---
build/conf/kylin_job_conf_inmem.xml | 15 ++++++++++++---
.../org/apache/kylin/common/KylinConfigBase.java | 2 +-
.../kylin/storage/hbase/steps/CubeHTableUtil.java | 5 +++--
6 files changed, 42 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index 5607336..c20488a 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -54,8 +54,8 @@ kylin.storage.cleanup.time.threshold=172800000
# Working folder in HDFS, make sure user has the right access to the hdfs directory
kylin.hdfs.working.dir=/kylin
-# Compression codec for htable, valid value [snappy, lzo, gzip, lz4]
-kylin.hbase.default.compression.codec=snappy
+# Compression codec for htable, valid value [none, snappy, lzo, gzip, lz4]
+kylin.hbase.default.compression.codec=none
# HBase Cluster FileSystem, which serving hbase, format as hdfs://hbase-cluster:8020
# Leave empty if hbase running on same cluster with hive and mapreduce
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/build/conf/kylin_hive_conf.xml
----------------------------------------------------------------------
diff --git a/build/conf/kylin_hive_conf.xml b/build/conf/kylin_hive_conf.xml
index 3d6109b..30c4feb 100644
--- a/build/conf/kylin_hive_conf.xml
+++ b/build/conf/kylin_hive_conf.xml
@@ -39,17 +39,28 @@
<description>enable map-side join</description>
</property>
+ <!--
+ The default map outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapreduce.map.output.compress.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description></description>
</property>
+ -->
+ <!--
+ The default job outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapreduce.output.fileoutputformat.compress.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description></description>
</property>
-
+ -->
<property>
<name>mapred.output.compression.type</name>
<value>BLOCK</value>
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/build/conf/kylin_job_conf.xml
----------------------------------------------------------------------
diff --git a/build/conf/kylin_job_conf.xml b/build/conf/kylin_job_conf.xml
index 877e82f..96b806c 100644
--- a/build/conf/kylin_job_conf.xml
+++ b/build/conf/kylin_job_conf.xml
@@ -31,26 +31,35 @@
<description>Compress map outputs</description>
</property>
+ <!--
+ The default map outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapred.map.output.compression.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description>The compression codec to use for map outputs
</description>
</property>
-
+ -->
<property>
<name>mapred.output.compress</name>
<value>true</value>
<description>Compress the output of a MapReduce job</description>
</property>
-
+ <!--
+ The default job outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapred.output.compression.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description>The compression codec to use for job outputs
</description>
</property>
-
+ -->
<property>
<name>mapred.output.compression.type</name>
<value>BLOCK</value>
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/build/conf/kylin_job_conf_inmem.xml
----------------------------------------------------------------------
diff --git a/build/conf/kylin_job_conf_inmem.xml b/build/conf/kylin_job_conf_inmem.xml
index 1cd809d..fea2f68 100644
--- a/build/conf/kylin_job_conf_inmem.xml
+++ b/build/conf/kylin_job_conf_inmem.xml
@@ -31,26 +31,35 @@
<description>Compress map outputs</description>
</property>
+ <!--
+ The default map outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapred.map.output.compression.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description>The compression codec to use for map outputs
</description>
</property>
-
+ -->
<property>
<name>mapred.output.compress</name>
<value>true</value>
<description>Compress the output of a MapReduce job</description>
</property>
-
+ <!--
+ The default job outputs compress codec is org.apache.hadoop.io.compress.DefaultCodec,
+ if SnappyCodec is supported, org.apache.hadoop.io.compress.SnappyCodec could be used.
+ -->
+ <!--
<property>
<name>mapred.output.compression.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description>The compression codec to use for job outputs
</description>
</property>
-
+ -->
<property>
<name>mapred.output.compression.type</name>
<value>BLOCK</value>
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 1390e24..f0c91da 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -615,7 +615,7 @@ abstract public class KylinConfigBase implements Serializable {
}
public String getHbaseDefaultCompressionCodec() {
- return getOptional("kylin.hbase.default.compression.codec", "");
+ return getOptional("kylin.hbase.default.compression.codec", "none");
}
public String getHbaseDefaultEncoding() {
http://git-wip-us.apache.org/repos/asf/kylin/blob/99d0d753/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index fe65598..9b487a7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -183,8 +183,9 @@ public class CubeHTableUtil {
cf.setCompressionType(Algorithm.LZ4);
break;
}
+ case "none":
default: {
- logger.info("hbase will not user any compression algorithm to compress data");
+ logger.info("hbase will not use any compression algorithm to compress data");
cf.setCompressionType(Algorithm.NONE);
}
}
@@ -194,7 +195,7 @@ public class CubeHTableUtil {
DataBlockEncoding encoding = DataBlockEncoding.valueOf(encodingStr);
cf.setDataBlockEncoding(encoding);
} catch (Exception e) {
- logger.info("hbase will not use any encoding");
+ logger.info("hbase will not use any encoding", e);
cf.setDataBlockEncoding(DataBlockEncoding.NONE);
}
[04/10] kylin git commit: KYLIN-Storage-Engine-kylin
Posted by li...@apache.org.
KYLIN-Storage-Engine-kylin
Signed-off-by: Jason <ji...@163.com>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/7bc420b2
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/7bc420b2
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/7bc420b2
Branch: refs/heads/1.5.x-HBase1.x
Commit: 7bc420b211f506c789a15d214b6730591a583ea0
Parents: a9c8f40
Author: zx chen <34...@qq.com>
Authored: Fri Sep 2 18:19:54 2016 +0800
Committer: Jason <ji...@163.com>
Committed: Mon Sep 5 12:06:09 2016 +0800
----------------------------------------------------------------------
webapp/app/js/model/cubeDescModel.js | 6 +++---
webapp/app/js/services/kylinProperties.js | 16 ++++++++++++++++
2 files changed, 19 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/7bc420b2/webapp/app/js/model/cubeDescModel.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/model/cubeDescModel.js b/webapp/app/js/model/cubeDescModel.js
index c981249..95a0b13 100644
--- a/webapp/app/js/model/cubeDescModel.js
+++ b/webapp/app/js/model/cubeDescModel.js
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-KylinApp.service('CubeDescModel', function () {
+KylinApp.service('CubeDescModel', function (kylinConfig) {
this.cubeMetaFrame = {};
@@ -57,8 +57,8 @@ KylinApp.service('CubeDescModel', function () {
"retention_range": "0",
"status_need_notify":['ERROR', 'DISCARDED', 'SUCCEED'],
"auto_merge_time_ranges": [604800000, 2419200000],
- "engine_type": 2,
- "storage_type":2,
+ "engine_type": kylinConfig.getCubeEng(),
+ "storage_type":kylinConfig.getStorageEng(),
"override_kylin_properties":{}
};
http://git-wip-us.apache.org/repos/asf/kylin/blob/7bc420b2/webapp/app/js/services/kylinProperties.js
----------------------------------------------------------------------
diff --git a/webapp/app/js/services/kylinProperties.js b/webapp/app/js/services/kylinProperties.js
index 68e8766..7110b8c 100644
--- a/webapp/app/js/services/kylinProperties.js
+++ b/webapp/app/js/services/kylinProperties.js
@@ -71,6 +71,22 @@ KylinApp.service('kylinConfig', function (AdminService, $log) {
}
return this.hiveLimit;
}
+
+ this.getStorageEng = function () {
+ this.StorageEng = this.getProperty("kylin.default.storage.engine").trim();
+ if (!this.StorageEng) {
+ return 2;
+ }
+ return this.StorageEng;
+ }
+
+ this.getCubeEng = function () {
+ this.CubeEng = this.getProperty("kylin.default.cube.engine").trim();
+ if (!this.CubeEng) {
+ return 2;
+ }
+ return this.CubeEng;
+ }
//fill config info for Config from backend
this.initWebConfigInfo = function () {
[07/10] kylin git commit: KYLIN-1962 use one file by default
Posted by li...@apache.org.
KYLIN-1962 use one file by default
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/ae72c257
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/ae72c257
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/ae72c257
Branch: refs/heads/1.5.x-HBase1.x
Commit: ae72c25705002f70df5996a4101acacbd6af7db6
Parents: 6c35c85
Author: shaofengshi <sh...@apache.org>
Authored: Tue Sep 6 10:40:24 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Sep 6 12:00:54 2016 +0800
----------------------------------------------------------------------
build/conf/kylin.properties | 41 ++++++++++++++++++-
build/conf/kylin_account.properties | 42 --------------------
.../test_case_data/sandbox/kylin.properties | 5 +++
.../sandbox/kylin_account.properties | 13 ------
4 files changed, 44 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/ae72c257/build/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin.properties b/build/conf/kylin.properties
index c20488a..ed86bdb 100644
--- a/build/conf/kylin.properties
+++ b/build/conf/kylin.properties
@@ -84,9 +84,14 @@ kylin.job.run.as.remote.cmd=false
# Only necessary when kylin.job.run.as.remote.cmd=true
kylin.job.remote.cli.hostname=
-
kylin.job.remote.cli.port=22
+# Only necessary when kylin.job.run.as.remote.cmd=true
+kylin.job.remote.cli.username=
+
+# Only necessary when kylin.job.run.as.remote.cmd=true
+kylin.job.remote.cli.password=
+
# Used by test cases to prepare synthetic data for sample cube
kylin.job.remote.cli.working.dir=/tmp/kylin
@@ -146,11 +151,43 @@ kylin.query.cache.enabled=true
# with "testing" profile, user can use pre-defined name/pwd like KYLIN/ADMIN to login
kylin.security.profile=testing
+### SECURITY ###
+# Default roles and admin roles in LDAP, for ldap and saml
+acl.defaultRole=ROLE_ANALYST,ROLE_MODELER
+acl.adminRole=ROLE_ADMIN
+
+# LDAP authentication configuration
+ldap.server=ldap://ldap_server:389
+ldap.username=
+ldap.password=
+
+# LDAP user account directory;
+ldap.user.searchBase=
+ldap.user.searchPattern=
+ldap.user.groupSearchBase=
+
+# LDAP service account directory
+ldap.service.searchBase=
+ldap.service.searchPattern=
+ldap.service.groupSearchBase=
+
+## SAML configurations for SSO
+# SAML IDP metadata file location
+saml.metadata.file=classpath:sso_metadata.xml
+saml.metadata.entityBaseURL=https://hostname/kylin
+saml.context.scheme=https
+saml.context.serverName=hostname
+saml.context.serverPort=443
+saml.context.contextPath=/kylin
+
### MAIL ###
# If true, will send email notification;
mail.enabled=false
-
+mail.host=
+mail.username=
+mail.password=
+mail.sender=
### WEB ###
# Help info, format{name|displayName|link}, optional
http://git-wip-us.apache.org/repos/asf/kylin/blob/ae72c257/build/conf/kylin_account.properties
----------------------------------------------------------------------
diff --git a/build/conf/kylin_account.properties b/build/conf/kylin_account.properties
deleted file mode 100644
index e98c142..0000000
--- a/build/conf/kylin_account.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-### JOB ###
-
-# Only necessary when kylin.job.run.as.remote.cmd=true
-kylin.job.remote.cli.username=
-
-# Only necessary when kylin.job.run.as.remote.cmd=true
-kylin.job.remote.cli.password=
-
-### SECURITY ###
-# Default roles and admin roles in LDAP, for ldap and saml
-acl.defaultRole=ROLE_ANALYST,ROLE_MODELER
-acl.adminRole=ROLE_ADMIN
-
-# LDAP authentication configuration
-ldap.server=ldap://ldap_server:389
-ldap.username=
-ldap.password=
-
-# LDAP user account directory;
-ldap.user.searchBase=
-ldap.user.searchPattern=
-ldap.user.groupSearchBase=
-
-# LDAP service account directory
-ldap.service.searchBase=
-ldap.service.searchPattern=
-ldap.service.groupSearchBase=
-
-# SAML configurations for SSO
-# SAML IDP metadata file location
-saml.metadata.file=classpath:sso_metadata.xml
-saml.metadata.entityBaseURL=https://hostname/kylin
-saml.context.scheme=https
-saml.context.serverName=hostname
-saml.context.serverPort=443
-saml.context.contextPath=/kylin
-
-### MAIL ###
-mail.host=
-mail.username=
-mail.password=
-mail.sender=
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/kylin/blob/ae72c257/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index a6f89df..1d1d9ba 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -67,6 +67,11 @@ kylin.job.run.as.remote.cmd=false
# Only necessary when kylin.job.run.as.remote.cmd=true
kylin.job.remote.cli.hostname=sandbox
+kylin.job.remote.cli.username=root
+
+# Only necessary when kylin.job.run.as.remote.cmd=true
+kylin.job.remote.cli.password=hadoop
+
# Used by test cases to prepare synthetic data for sample cube
kylin.job.remote.cli.working.dir=/tmp/kylin
http://git-wip-us.apache.org/repos/asf/kylin/blob/ae72c257/examples/test_case_data/sandbox/kylin_account.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin_account.properties b/examples/test_case_data/sandbox/kylin_account.properties
deleted file mode 100644
index 0dfa5f7..0000000
--- a/examples/test_case_data/sandbox/kylin_account.properties
+++ /dev/null
@@ -1,13 +0,0 @@
-### JOB ###
-
-# Only necessary when kylin.job.run.as.remote.cmd=true
-kylin.job.remote.cli.username=root
-
-# Only necessary when kylin.job.run.as.remote.cmd=true
-kylin.job.remote.cli.password=hadoop
-
-### MAIL ###
-mail.host=
-mail.username=
-mail.password=
-mail.sender=
[03/10] kylin git commit: KYLIN-1984 fix CI
Posted by li...@apache.org.
KYLIN-1984 fix CI
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/a9c8f40e
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/a9c8f40e
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/a9c8f40e
Branch: refs/heads/1.5.x-HBase1.x
Commit: a9c8f40ee7423180d60313047b329d0c91d0dfb9
Parents: 99d0d75
Author: Hongbin Ma <ma...@apache.org>
Authored: Mon Sep 5 11:09:59 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Mon Sep 5 11:09:59 2016 +0800
----------------------------------------------------------------------
.../test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/a9c8f40e/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
----------------------------------------------------------------------
diff --git a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
index 18d0a27..dd52f82 100644
--- a/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
+++ b/core-cube/src/test/java/org/apache/kylin/cube/CubeSpecificConfigTest.java
@@ -54,7 +54,7 @@ public class CubeSpecificConfigTest extends LocalFileMetadataTestCase {
}
private void verifyOverride(KylinConfig base, KylinConfig override) {
- assertEquals("", base.getHbaseDefaultCompressionCodec());
+ assertEquals("none", base.getHbaseDefaultCompressionCodec());
assertEquals("lz4", override.getHbaseDefaultCompressionCodec());
}
}
[06/10] kylin git commit: KYLIN-1962: minor,
fix Spring Security reading of Kylin properties
Posted by li...@apache.org.
KYLIN-1962: minor, fix Spring Security reading of Kylin properties
Signed-off-by: shaofengshi <sh...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/6c35c859
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/6c35c859
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/6c35c859
Branch: refs/heads/1.5.x-HBase1.x
Commit: 6c35c8594c34e7401a5adb43f649b1ed62e18841
Parents: 08f7dda
Author: Yiming Liu <li...@gmail.com>
Authored: Tue Sep 6 00:57:19 2016 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Tue Sep 6 12:00:47 2016 +0800
----------------------------------------------------------------------
.../org/apache/kylin/common/KylinConfig.java | 38 ++++++++++----------
.../security/PasswordPlaceholderConfigurer.java | 20 +++++++++++
.../org/apache/kylin/tool/DiagnosisInfoCLI.java | 2 +-
3 files changed, 40 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/kylin/blob/6c35c859/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 241170a..f134ad4 100644
--- a/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/core-common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -47,7 +47,7 @@ public class KylinConfig extends KylinConfigBase {
/** Kylin properties file name */
public static final String KYLIN_CONF_PROPERTIES_FILE = "kylin.properties";
- public static final String KYLIN_SECURITY_CONF_PROPERTIES_FILE = "kylin_account.properties";
+ public static final String KYLIN_ACCOUNT_CONF_PROPERTIES_FILE = "kylin_account.properties";
public static final String KYLIN_CONF = "KYLIN_CONF";
// static cached instances
@@ -205,11 +205,11 @@ public class KylinConfig extends KylinConfigBase {
return getKylinPropertiesFile(path);
}
- static File getKylinSecurityPropertiesFile() {
+ static File getKylinAccountPropertiesFile() {
String kylinConfHome = System.getProperty(KYLIN_CONF);
if (!StringUtils.isEmpty(kylinConfHome)) {
logger.info("Use KYLIN_CONF=" + kylinConfHome);
- return getKylinSecurityPropertiesFile(kylinConfHome);
+ return getKylinAccountPropertiesFile(kylinConfHome);
}
logger.warn("KYLIN_CONF property was not set, will seek KYLIN_HOME env variable");
@@ -219,10 +219,10 @@ public class KylinConfig extends KylinConfigBase {
throw new KylinConfigCannotInitException("Didn't find KYLIN_CONF or KYLIN_HOME, please set one of them");
String path = kylinHome + File.separator + "conf";
- return getKylinSecurityPropertiesFile(path);
+ return getKylinAccountPropertiesFile(path);
}
- private static Properties getKylinProperties() {
+ public static Properties getKylinProperties() {
File propFile = getKylinPropertiesFile();
if (propFile == null || !propFile.exists()) {
logger.error("fail to locate " + KYLIN_CONF_PROPERTIES_FILE);
@@ -243,22 +243,22 @@ public class KylinConfig extends KylinConfigBase {
conf.putAll(propOverride);
}
- File securityPropFile = getKylinSecurityPropertiesFile();
- if (securityPropFile.exists()) {
- FileInputStream ois = new FileInputStream(securityPropFile);
- Properties propSecurity = new Properties();
- propSecurity.load(ois);
+ File accountPropFile = getKylinAccountPropertiesFile();
+ if (accountPropFile.exists()) {
+ FileInputStream ois = new FileInputStream(accountPropFile);
+ Properties propAccount = new Properties();
+ propAccount.load(ois);
IOUtils.closeQuietly(ois);
- conf.putAll(propSecurity);
+ conf.putAll(propAccount);
}
- File securityPropOverrideFile = new File(securityPropFile.getParentFile(), securityPropFile.getName() + ".override");
- if (securityPropOverrideFile.exists()) {
- FileInputStream ois = new FileInputStream(securityPropOverrideFile);
- Properties propSecurityOverride = new Properties();
- propSecurityOverride.load(ois);
+ File accountPropOverrideFile = new File(accountPropFile.getParentFile(), accountPropFile.getName() + ".override");
+ if (accountPropOverrideFile.exists()) {
+ FileInputStream ois = new FileInputStream(accountPropOverrideFile);
+ Properties propAccountOverride = new Properties();
+ propAccountOverride.load(ois);
IOUtils.closeQuietly(ois);
- conf.putAll(propSecurityOverride);
+ conf.putAll(propAccountOverride);
}
} catch (IOException e) {
@@ -282,12 +282,12 @@ public class KylinConfig extends KylinConfigBase {
return new File(path, KYLIN_CONF_PROPERTIES_FILE);
}
- private static File getKylinSecurityPropertiesFile(String path) {
+ private static File getKylinAccountPropertiesFile(String path) {
if (path == null) {
return null;
}
- return new File(path, KYLIN_SECURITY_CONF_PROPERTIES_FILE);
+ return new File(path, KYLIN_ACCOUNT_CONF_PROPERTIES_FILE);
}
public static void setSandboxEnvIfPossible() {
http://git-wip-us.apache.org/repos/asf/kylin/blob/6c35c859/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
index 5381b14..092d73a 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
@@ -18,13 +18,21 @@
package org.apache.kylin.rest.security;
+import java.io.InputStream;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.charset.Charset;
import java.util.Properties;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
/**
@@ -38,7 +46,19 @@ public class PasswordPlaceholderConfigurer extends PropertyPlaceholderConfigurer
*/
private static byte[] key = { 0x74, 0x68, 0x69, 0x73, 0x49, 0x73, 0x41, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79 };
+ /**
+ * The PasswordPlaceholderConfigurer will read Kylin properties as the Spring resource
+ */
public PasswordPlaceholderConfigurer() {
+ Resource[] resources = new Resource[1];
+ Properties prop = KylinConfig.getKylinProperties();
+ StringWriter writer = new StringWriter();
+ prop.list(new PrintWriter(writer));
+ String propString = writer.getBuffer().toString();
+ IOUtils.closeQuietly(writer);
+ InputStream is = IOUtils.toInputStream(propString, Charset.defaultCharset());
+ resources[0] = new InputStreamResource(is);
+ this.setLocations(resources);
}
public static String encrypt(String strToEncrypt) {
http://git-wip-us.apache.org/repos/asf/kylin/blob/6c35c859/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
index e77ac3b..f93aaf2 100644
--- a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
@@ -184,7 +184,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
File[] confFiles = srcConfDir.listFiles();
if (confFiles != null) {
for (File confFile : confFiles) {
- if (!KylinConfig.KYLIN_SECURITY_CONF_PROPERTIES_FILE.equals(confFile.getName())) {
+ if (!KylinConfig.KYLIN_ACCOUNT_CONF_PROPERTIES_FILE.equals(confFile.getName())) {
FileUtils.copyFileToDirectory(confFile, destConfDir);
}
}