Posted to commits@kylin.apache.org by sh...@apache.org on 2018/12/06 07:54:46 UTC
[kylin] 02/03: KYLIN-3518 Fix Coprocessor NPE problem on hbase 2
This is an automated email from the ASF dual-hosted git repository.
shaofengshi pushed a commit to branch 2.5.x-hadoop3.1
in repository https://gitbox.apache.org/repos/asf/kylin.git
commit 1e9305f8844577e3f5f60c85c0a835ae0a79ca92
Author: Lijun Cao <>
AuthorDate: Thu Aug 30 09:28:54 2018 +0800
KYLIN-3518 Fix Coprocessor NPE problem on hbase 2
Signed-off-by: shaofengshi <sh...@apache.org>
---
build/deploy/server.xml | 2 +-
build/script/elimate-jar-conflict.sh | 20 ++++++++++
build/script/prepare.sh | 3 ++
.../v2/coprocessor/endpoint/CubeVisitService.java | 10 ++---
.../hbase/lookup/LookupTableToHFileJob.java | 24 +++++------
.../kylin/storage/hbase/steps/CubeHTableUtil.java | 46 +++++++++++-----------
.../storage/hbase/util/DeployCoprocessorCLI.java | 46 +++++++++++-----------
7 files changed, 89 insertions(+), 62 deletions(-)
diff --git a/build/deploy/server.xml b/build/deploy/server.xml
index 96f329b..920be25 100644
--- a/build/deploy/server.xml
+++ b/build/deploy/server.xml
@@ -26,7 +26,7 @@
<!--APR library loader. Documentation at /docs/apr.html -->
<Listener className="org.apache.catalina.core.AprLifecycleListener" SSLEngine="on" />
<!--Initialize Jasper prior to webapps are loaded. Documentation at /docs/jasper-howto.html -->
- <Listener className="org.apache.catalina.core.JasperListener" />
+ <!-- <Listener className="org.apache.catalina.core.JasperListener" /> -->
<!-- Prevent memory leaks due to use of particular java/javax APIs-->
<Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener" />
<Listener className="org.apache.catalina.mbeans.GlobalResourcesLifecycleListener" />
diff --git a/build/script/elimate-jar-conflict.sh b/build/script/elimate-jar-conflict.sh
new file mode 100644
index 0000000..d02a874
--- /dev/null
+++ b/build/script/elimate-jar-conflict.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+current_dir=`pwd`
+cd ${current_dir}/build/tomcat/webapps
+unzip kylin.war && rm -f kylin.war
+cd WEB-INF/lib
+#remove slf4j-api-1.7.21.jar to solve slf4j conflict
+rm -f slf4j-api-1.7.21.jar
+mkdir modify_avatica_jar && mv avatica-1.10.0.jar modify_avatica_jar
+cd modify_avatica_jar
+#remove org/slf4j in avatica-1.10.0.jar and repackage it to solve slf4j conflict
+unzip avatica-1.10.0.jar && rm -f avatica-1.10.0.jar
+rm -rf org/slf4j && jar -cf avatica-1.10.0.jar ./
+rm -rf `ls | egrep -v avatica-1.10.0.jar`
+mv avatica-1.10.0.jar ..
+cd .. && rm -rf modify_avatica_jar
+cd ${current_dir}/build/tomcat/webapps
+#repackage kylin.war
+jar -cf kylin.war ./ && rm -rf `ls | egrep -v kylin.war`
+cd ${current_dir}
\ No newline at end of file
diff --git a/build/script/prepare.sh b/build/script/prepare.sh
index deaf58d..be9dd9d 100755
--- a/build/script/prepare.sh
+++ b/build/script/prepare.sh
@@ -31,6 +31,9 @@ export version
sh build/script/prepare-libs.sh || { exit 1; }
cp server/target/kylin-server-${version}.war build/tomcat/webapps/kylin.war
+
+sh build/script/elimate-jar-conflict.sh
+
chmod 644 build/tomcat/webapps/kylin.war
echo "add js css to war"
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
index ded3500..2beddc7 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/cube/v2/coprocessor/endpoint/CubeVisitService.java
@@ -24,6 +24,7 @@ import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
+import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -31,12 +32,11 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
@@ -78,7 +78,7 @@ import com.sun.management.OperatingSystemMXBean;
@SuppressWarnings("unused")
//used in hbase endpoint
-public class CubeVisitService extends CubeVisitProtos.CubeVisitService implements Coprocessor, CoprocessorService {
+public class CubeVisitService extends CubeVisitProtos.CubeVisitService implements RegionCoprocessor {
private static final Logger logger = LoggerFactory.getLogger(CubeVisitService.class);
//TODO limit memory footprint
@@ -448,7 +448,7 @@ public class CubeVisitService extends CubeVisitProtos.CubeVisitService implement
}
@Override
- public Service getService() {
- return this;
+ public Iterable<Service> getServices() {
+ return Collections.singleton(this);
}
}
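
For context, HBase 2 deprecates the Coprocessor/CoprocessorService pairing and discovers endpoint services through RegionCoprocessor.getServices() instead; an endpoint that only offers the old getService() is never registered, which appears to be the gap behind the NPE this commit fixes. A minimal sketch of the new pattern (MyEndpoint and MyProtos.MyService are hypothetical stand-ins for a protobuf-generated service such as CubeVisitProtos.CubeVisitService):

    import java.util.Collections;

    import com.google.protobuf.Service;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;

    // Hypothetical endpoint; MyProtos.MyService stands in for any
    // protobuf-generated service base class.
    public class MyEndpoint extends MyProtos.MyService implements RegionCoprocessor {

        // HBase 2 collects a region coprocessor's endpoint services here;
        // returning the instance itself is how a service self-registers.
        @Override
        public Iterable<Service> getServices() {
            return Collections.singleton(this);
        }
    }
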
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
index 0135a22..7cb16d1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/lookup/LookupTableToHFileJob.java
@@ -27,12 +27,12 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -209,24 +209,24 @@ public class LookupTableToHFileJob extends AbstractHadoopJob {
String hTableName = genHTableName(kylinConfig, admin, sourceTableName);
TableName tableName = TableName.valueOf(hTableName);
- HTableDescriptor hTableDesc = new HTableDescriptor(tableName);
- hTableDesc.setCompactionEnabled(false);
- hTableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
- hTableDesc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
- hTableDesc.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
+ TableDescriptorBuilder descBuilder = TableDescriptorBuilder.newBuilder(tableName);
+ descBuilder.setCompactionEnabled(false);
+ descBuilder.setValue(TableDescriptorBuilder.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
+ descBuilder.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
+ descBuilder.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
String commitInfo = KylinVersion.getGitCommitInfo();
if (!StringUtils.isEmpty(commitInfo)) {
- hTableDesc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
+ descBuilder.setValue(IRealizationConstants.HTableGitTag, commitInfo);
}
- HColumnDescriptor cf = CubeHTableUtil.createColumnFamily(kylinConfig, HBaseLookupRowEncoder.CF_STRING, false);
- hTableDesc.addFamily(cf);
+ ColumnFamilyDescriptor cf = CubeHTableUtil.createColumnFamily(kylinConfig, HBaseLookupRowEncoder.CF_STRING, false);
+ descBuilder.modifyColumnFamily(cf);
try {
if (shardNum > 1) {
- admin.createTable(hTableDesc, getSplitsByShardNum(shardNum));
+ admin.createTable(descBuilder.build(), getSplitsByShardNum(shardNum));
} else {
- admin.createTable(hTableDesc);
+ admin.createTable(descBuilder.build());
}
} finally {
IOUtils.closeQuietly(admin);
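
The hunk above shows the general HBase 2 migration for table creation: the mutable HTableDescriptor/HColumnDescriptor pair is replaced by immutable descriptors produced from builders. A minimal standalone sketch of that pattern, assuming an open Admin and an illustrative family name; note that TableDescriptorBuilder.setColumnFamily attaches a new family, whereas modifyColumnFamily (the call used above) is meant for a family the builder already holds:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class CreateTableSketch {

        // Builds and creates a one-family table the HBase 2 way.
        static void createTable(Admin admin, String name) throws IOException {
            TableDescriptor desc = TableDescriptorBuilder
                    .newBuilder(TableName.valueOf(name))
                    .setCompactionEnabled(false)
                    // arbitrary metadata, like the HTable tags set above
                    .setValue("MY_TAG", "my-value")
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("F1"))
                    .build(); // immutable once built
            admin.createTable(desc);
        }
    }
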
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index d06c993..97f6262 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -24,11 +24,12 @@ import java.util.Locale;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -36,6 +37,7 @@ import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.security.User;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.KylinVersion;
+import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.model.CubeDesc;
@@ -60,25 +62,25 @@ public class CubeHTableUtil {
CubeDesc cubeDesc = cubeInstance.getDescriptor();
KylinConfig kylinConfig = cubeDesc.getConfig();
- HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(cubeSegment.getStorageLocationIdentifier()));
- tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
- tableDesc.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
- tableDesc.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
+ TableDescriptorBuilder descBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(cubeSegment.getStorageLocationIdentifier()));
+ descBuilder.setValue(TableDescriptorBuilder.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
+ descBuilder.setValue(IRealizationConstants.HTableTag, kylinConfig.getMetadataUrlPrefix());
+ descBuilder.setValue(IRealizationConstants.HTableCreationTime, String.valueOf(System.currentTimeMillis()));
if (!StringUtils.isEmpty(kylinConfig.getKylinOwner())) {
//HTableOwner is the team that provides kylin service
- tableDesc.setValue(IRealizationConstants.HTableOwner, kylinConfig.getKylinOwner());
+ descBuilder.setValue(IRealizationConstants.HTableOwner, kylinConfig.getKylinOwner());
}
String commitInfo = KylinVersion.getGitCommitInfo();
if (!StringUtils.isEmpty(commitInfo)) {
- tableDesc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
+ descBuilder.setValue(IRealizationConstants.HTableGitTag, commitInfo);
}
//HTableUser is the cube owner, which will be the "user"
- tableDesc.setValue(IRealizationConstants.HTableUser, cubeInstance.getOwner());
+ descBuilder.setValue(IRealizationConstants.HTableUser, cubeInstance.getOwner());
- tableDesc.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
+ descBuilder.setValue(IRealizationConstants.HTableSegmentTag, cubeSegment.toString());
Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
Connection conn = HBaseConnection.get(kylinConfig.getStorageUrl());
@@ -87,12 +89,12 @@ public class CubeHTableUtil {
try {
if (User.isHBaseSecurityEnabled(conf)) {
// add coprocessor for bulk load
- tableDesc.addCoprocessor("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
+ descBuilder.addCoprocessor("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint");
}
for (HBaseColumnFamilyDesc cfDesc : cubeDesc.getHbaseMapping().getColumnFamily()) {
- HColumnDescriptor cf = createColumnFamily(kylinConfig, cfDesc.getName(), cfDesc.isMemoryHungry());
- tableDesc.addFamily(cf);
+ ColumnFamilyDescriptor cf = createColumnFamily(kylinConfig, cfDesc.getName(), cfDesc.isMemoryHungry());
+ descBuilder.setColumnFamily(cf);
}
if (admin.tableExists(TableName.valueOf(tableName))) {
@@ -101,9 +103,9 @@ public class CubeHTableUtil {
throw new RuntimeException("HBase table " + tableName + " exists!");
}
- DeployCoprocessorCLI.deployCoprocessor(tableDesc);
+ DeployCoprocessorCLI.deployCoprocessor(descBuilder);
- admin.createTable(tableDesc, splitKeys);
+ admin.createTable(descBuilder.build(), splitKeys);
Preconditions.checkArgument(admin.isTableAvailable(TableName.valueOf(tableName)), "table " + tableName + " created, but is not available due to some reasons");
logger.info("create hbase table " + tableName + " done.");
} finally {
@@ -137,14 +139,14 @@ public class CubeHTableUtil {
admin.deleteTable(tableName);
}
- HTableDescriptor tableDesc = new HTableDescriptor(tableName);
- tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
+ TableDescriptorBuilder descBuilder = TableDescriptorBuilder.newBuilder(tableName);
+ descBuilder.setValue(TableDescriptorBuilder.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
- tableDesc.addFamily(createColumnFamily(kylinConfig, cfName, false));
+ descBuilder.modifyColumnFamily(createColumnFamily(kylinConfig, cfName, false));
logger.info("creating hbase table " + tableName);
- admin.createTable(tableDesc, null);
+ admin.createTable(descBuilder.build(), null);
Preconditions.checkArgument(admin.isTableAvailable(tableName), "table " + tableName + " created, but is not available due to some reasons");
logger.info("create hbase table " + tableName + " done.");
} finally {
@@ -152,8 +154,8 @@ public class CubeHTableUtil {
}
}
- public static HColumnDescriptor createColumnFamily(KylinConfig kylinConfig, String cfName, boolean isMemoryHungry) {
- HColumnDescriptor cf = new HColumnDescriptor(cfName);
+ public static ColumnFamilyDescriptor createColumnFamily(KylinConfig kylinConfig, String cfName, boolean isMemoryHungry) {
+ ColumnFamilyDescriptorBuilder cf = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cfName));
cf.setMaxVersions(1);
if (isMemoryHungry) {
@@ -204,7 +206,7 @@ public class CubeHTableUtil {
cf.setInMemory(false);
cf.setBloomFilterType(BloomType.NONE);
cf.setScope(kylinConfig.getHBaseReplicationScope());
- return cf;
+ return cf.build();
}
}
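
Column families follow the same builder discipline: HColumnDescriptor gives way to ColumnFamilyDescriptorBuilder with largely unchanged setter names and a final build(), as createColumnFamily above now does. A minimal sketch (the encoding and compression picks are illustrative, not Kylin's configured values):

    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    import org.apache.hadoop.hbase.regionserver.BloomType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ColumnFamilySketch {

        // Mirrors the shape of createColumnFamily: configure, then build.
        static ColumnFamilyDescriptor newFamily(String cfName) {
            return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cfName))
                    .setMaxVersions(1) // Kylin keeps a single version per cell
                    .setInMemory(false)
                    .setBloomFilterType(BloomType.NONE)
                    .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF) // illustrative
                    .setCompressionType(Algorithm.SNAPPY) // illustrative
                    .build(); // immutable descriptor
        }
    }
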
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index ac9ad15..6a3d56d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -42,11 +42,12 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.KylinVersion;
import org.apache.kylin.common.util.Bytes;
@@ -179,7 +180,7 @@ public class DeployCoprocessorCLI {
}
logger.info("Commit Information: " + commitInfo);
for (String tableName : tableNames) {
- HTableDescriptor tableDesc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+ TableDescriptor tableDesc = hbaseAdmin.getDescriptor(TableName.valueOf(tableName));
String gitTag = tableDesc.getValue(IRealizationConstants.HTableGitTag);
if (commitInfo.equals(gitTag)) {
filteredList.add(tableName);
@@ -250,18 +251,18 @@ public class DeployCoprocessorCLI {
return result;
}
- public static void deployCoprocessor(HTableDescriptor tableDesc) {
+ public static void deployCoprocessor(TableDescriptorBuilder desBuilder) {
try {
- initHTableCoprocessor(tableDesc);
- logger.info("hbase table " + tableDesc.getTableName() + " deployed with coprocessor.");
+ initHTableCoprocessor(desBuilder);
+ logger.info("hbase table " + desBuilder.build().getTableName() + " deployed with coprocessor.");
} catch (Exception ex) {
- logger.error("Error deploying coprocessor on " + tableDesc.getTableName(), ex);
+ logger.error("Error deploying coprocessor on " + desBuilder.build().getTableName(), ex);
logger.error("Will try creating the table without coprocessor.");
}
}
- private static void initHTableCoprocessor(HTableDescriptor desc) throws IOException {
+ private static void initHTableCoprocessor(TableDescriptorBuilder descBuilder) throws IOException {
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration();
FileSystem fileSystem = FileSystem.get(hconf);
@@ -269,17 +270,18 @@ public class DeployCoprocessorCLI {
String localCoprocessorJar = kylinConfig.getCoprocessorLocalJar();
Path hdfsCoprocessorJar = DeployCoprocessorCLI.uploadCoprocessorJar(localCoprocessorJar, fileSystem, null);
- DeployCoprocessorCLI.addCoprocessorOnHTable(desc, hdfsCoprocessorJar);
+ DeployCoprocessorCLI.addCoprocessorOnHTable(descBuilder, hdfsCoprocessorJar);
}
- public static void addCoprocessorOnHTable(HTableDescriptor desc, Path hdfsCoprocessorJar) throws IOException {
- logger.info("Add coprocessor on " + desc.getNameAsString());
- desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
+ public static void addCoprocessorOnHTable(TableDescriptorBuilder descBuilder, Path hdfsCoprocessorJar) throws IOException {
+ logger.info("Add coprocessor on " + descBuilder.build().getTableName().toString());
+ descBuilder.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
}
public static boolean resetCoprocessor(String tableName, Admin hbaseAdmin, Path hdfsCoprocessorJar) throws IOException {
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
- HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+ TableDescriptor desc = hbaseAdmin.getDescriptor(TableName.valueOf(tableName));
+ TableDescriptorBuilder descBuilder = TableDescriptorBuilder.newBuilder(desc);
//when the table has migrated from dev env to test(prod) env, the dev server
//should not reset the coprocessor of the table.
@@ -295,30 +297,30 @@ public class DeployCoprocessorCLI {
hbaseAdmin.disableTable(TableName.valueOf(tableName));
while (desc.hasCoprocessor(CubeObserverClassOld2)) {
- desc.removeCoprocessor(CubeObserverClassOld2);
+ desc = descBuilder.removeCoprocessor(CubeObserverClassOld2).build();
}
while (desc.hasCoprocessor(CubeEndpointClass)) {
- desc.removeCoprocessor(CubeEndpointClass);
+ desc = descBuilder.removeCoprocessor(CubeEndpointClass).build();
}
while (desc.hasCoprocessor(IIEndpointClass)) {
- desc.removeCoprocessor(IIEndpointClass);
+ desc = descBuilder.removeCoprocessor(IIEndpointClass).build();
}
// remove legacy coprocessor from v1.x
while (desc.hasCoprocessor(CubeObserverClassOld)) {
- desc.removeCoprocessor(CubeObserverClassOld);
+ desc = descBuilder.removeCoprocessor(CubeObserverClassOld).build();
}
while (desc.hasCoprocessor(IIEndpointClassOld)) {
- desc.removeCoprocessor(IIEndpointClassOld);
+ desc = descBuilder.removeCoprocessor(IIEndpointClassOld).build();
}
- addCoprocessorOnHTable(desc, hdfsCoprocessorJar);
+ addCoprocessorOnHTable(descBuilder, hdfsCoprocessorJar);
// update commit tags
String commitInfo = KylinVersion.getGitCommitInfo();
if (!StringUtils.isEmpty(commitInfo)) {
- desc.setValue(IRealizationConstants.HTableGitTag, commitInfo);
+ descBuilder.setValue(IRealizationConstants.HTableGitTag, commitInfo);
}
- hbaseAdmin.modifyTable(TableName.valueOf(tableName), desc);
+ hbaseAdmin.modifyTable(descBuilder.build());
logger.info("Enable " + tableName);
hbaseAdmin.enableTable(TableName.valueOf(tableName));
@@ -491,9 +493,9 @@ public class DeployCoprocessorCLI {
HashSet<String> result = new HashSet<String>();
for (String tableName : tableNames) {
- HTableDescriptor tableDescriptor = null;
+ TableDescriptor tableDescriptor = null;
try {
- tableDescriptor = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+ tableDescriptor = hbaseAdmin.getDescriptor(TableName.valueOf(tableName));
} catch (TableNotFoundException e) {
logger.warn("Table not found " + tableName, e);
continue;
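
Finally, read-modify-write on an existing table changes shape as well: Admin.getDescriptor returns an immutable TableDescriptor, so resetCoprocessor above seeds a TableDescriptorBuilder from it, edits the builder, and applies the result with modifyTable. A minimal sketch of that round trip, with hypothetical coprocessor class names:

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class ResetCoprocessorSketch {

        // Swaps one endpoint class for another on an existing table.
        static void swapCoprocessor(Admin admin, String table, Path jar) throws IOException {
            TableName name = TableName.valueOf(table);
            TableDescriptor desc = admin.getDescriptor(name); // immutable snapshot
            TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc);
            if (desc.hasCoprocessor("com.example.OldEndpoint")) { // hypothetical
                builder.removeCoprocessor("com.example.OldEndpoint");
            }
            builder.addCoprocessor("com.example.NewEndpoint", jar, 1001, null); // hypothetical
            admin.disableTable(name);
            admin.modifyTable(builder.build()); // replaces the whole descriptor
            admin.enableTable(name);
        }
    }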