You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by li...@apache.org on 2015/01/23 09:32:30 UTC
[43/50] [abbrv] incubator-kylin git commit: KYLIN-567, More flexible validation of new segments, done
KYLIN-567, More flexible validation of new segments, done
Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/acb27ea1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/acb27ea1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/acb27ea1
Branch: refs/heads/inverted-index
Commit: acb27ea170533a23926a947b3daa1b54520e0033
Parents: 8072fb7
Author: Li, Yang <ya...@ebay.com>
Authored: Fri Jan 23 15:45:44 2015 +0800
Committer: Li, Yang <ya...@ebay.com>
Committed: Fri Jan 23 15:45:44 2015 +0800
----------------------------------------------------------------------
.../src/main/java/com/kylinolap/cube/CubeManager.java | 10 +++++-----
.../job/cube/UpdateCubeInfoAfterMergeStep.java | 3 ++-
.../com/kylinolap/job/hadoop/AbstractHadoopJob.java | 10 +++++-----
.../hadoop/cardinality/HiveColumnCardinalityJob.java | 2 +-
.../cardinality/HiveColumnCardinalityUpdateJob.java | 2 +-
.../java/com/kylinolap/job/hadoop/cube/CuboidJob.java | 14 +++++++-------
.../com/kylinolap/job/hadoop/cube/MergeCuboidJob.java | 3 +--
.../job/hadoop/cube/RowKeyDistributionCheckerJob.java | 2 +-
.../kylinolap/job/hadoop/cube/StorageCleanupJob.java | 2 +-
.../job/hadoop/dict/CreateDictionaryJob.java | 2 +-
.../hadoop/dict/CreateInvertedIndexDictionaryJob.java | 2 +-
.../kylinolap/job/hadoop/hbase/CreateHTableJob.java | 2 +-
.../job/hadoop/invertedindex/IICreateHTableJob.java | 2 +-
.../com/kylinolap/job/tools/CubeMigrationCLI.java | 4 ++--
14 files changed, 30 insertions(+), 30 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/cube/src/main/java/com/kylinolap/cube/CubeManager.java
----------------------------------------------------------------------
diff --git a/cube/src/main/java/com/kylinolap/cube/CubeManager.java b/cube/src/main/java/com/kylinolap/cube/CubeManager.java
index 70fb8b6..1dc2da0 100644
--- a/cube/src/main/java/com/kylinolap/cube/CubeManager.java
+++ b/cube/src/main/java/com/kylinolap/cube/CubeManager.java
@@ -313,11 +313,8 @@ public class CubeManager implements IRealizationProvider {
if (cubeInstance.getDescriptor().getCubePartitionDesc().isPartitioned()) {
if (readySegments.isEmpty()) {
startDate = cubeInstance.getDescriptor().getCubePartitionDesc().getPartitionDateStart();
- newSegment = newSegment(cubeInstance, startDate, endDate);
- } else {
- startDate = readySegments.get(readySegments.size() - 1).getDateRangeEnd();
- newSegment = newSegment(cubeInstance, startDate, endDate);
}
+ newSegment = newSegment(cubeInstance, startDate, endDate);
} else {
newSegment = newSegment(cubeInstance, 0, Long.MAX_VALUE);
}
@@ -339,7 +336,7 @@ public class CubeManager implements IRealizationProvider {
* For each cube htable, we leverage htable's metadata to keep track of
* which kylin server(represented by its kylin_metadata prefix) owns this htable
*/
- public static String getHtableMetadataKey() {
+ public static String getHTableMetadataKey() {
return "KYLIN_HOST";
}
@@ -444,6 +441,9 @@ public class CubeManager implements IRealizationProvider {
}
private CubeSegment newSegment(CubeInstance cubeInstance, long startDate, long endDate) {
+ if (startDate >= endDate)
+ throw new IllegalArgumentException("New segment range invalid, start date must be earlier than end date, " + startDate + " < " + endDate);
+
CubeSegment segment = new CubeSegment();
String incrementalSegName = CubeSegment.getSegmentName(startDate, endDate);
segment.setUuid(UUID.randomUUID().toString());
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/cube/UpdateCubeInfoAfterMergeStep.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/cube/UpdateCubeInfoAfterMergeStep.java b/job/src/main/java/com/kylinolap/job/cube/UpdateCubeInfoAfterMergeStep.java
index 9e0fd63..dfcab68 100644
--- a/job/src/main/java/com/kylinolap/job/cube/UpdateCubeInfoAfterMergeStep.java
+++ b/job/src/main/java/com/kylinolap/job/cube/UpdateCubeInfoAfterMergeStep.java
@@ -43,6 +43,7 @@ public class UpdateCubeInfoAfterMergeStep extends AbstractExecutable {
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
final CubeInstance cube = cubeManager.getCube(getCubeName());
+
CubeSegment mergedSegment = cube.getSegmentById(getSegmentId());
if (mergedSegment == null) {
return new ExecuteResult(ExecuteResult.State.FAILED, "there is no segment with id:" + getSegmentId());
@@ -51,11 +52,11 @@ public class UpdateCubeInfoAfterMergeStep extends AbstractExecutable {
Preconditions.checkState(StringUtils.isNotEmpty(cubeSizeString), "Can't get cube segment size.");
long cubeSize = Long.parseLong(cubeSizeString) / 1024;
+ // collect source statistics
List<String> mergingSegmentIds = getMergingSegmentIds();
if (mergingSegmentIds.isEmpty()) {
return new ExecuteResult(ExecuteResult.State.FAILED, "there are no merging segments");
}
-
long sourceCount = 0L;
long sourceSize = 0L;
for (String id : mergingSegmentIds) {
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/AbstractHadoopJob.java b/job/src/main/java/com/kylinolap/job/hadoop/AbstractHadoopJob.java
index c538d10..a8fc85d 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/AbstractHadoopJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/AbstractHadoopJob.java
@@ -63,7 +63,7 @@ import com.kylinolap.metadata.model.TableDesc;
@SuppressWarnings("static-access")
public abstract class AbstractHadoopJob extends Configured implements Tool {
- protected static final Logger log = LoggerFactory.getLogger(AbstractHadoopJob.class);
+ protected static final Logger logger = LoggerFactory.getLogger(AbstractHadoopJob.class);
protected static final Option OPTION_JOB_NAME = OptionBuilder.withArgName("name").hasArg().isRequired(true).withDescription("Job name. For exmaple, Kylin_Cuboid_Builder-clsfd_v2_Step_22-D)").create("jobname");
protected static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName("name").hasArg().isRequired(true).withDescription("Cube name. For exmaple, flat_item_cube").create("cubename");
@@ -120,7 +120,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
} else {
job.waitForCompletion(true);
retVal = job.isSuccessful() ? 0 : 1;
- log.debug("Job '" + job.getJobName() + "' finished " + (job.isSuccessful() ? "successfully in " : "with failures. Time taken ") + StringUtils.formatTime((System.nanoTime() - start) / 1000000L));
+ logger.debug("Job '" + job.getJobName() + "' finished " + (job.isSuccessful() ? "successfully in " : "with failures. Time taken ") + StringUtils.formatTime((System.nanoTime() - start) / 1000000L));
}
return retVal;
}
@@ -145,7 +145,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
FileStatus[] fileStatuses = fs.listStatus(path);
boolean hasDir = false;
for (FileStatus stat : fileStatuses) {
- if (stat.isDirectory()) {
+ if (stat.isDirectory() && !stat.getPath().getName().startsWith("_")) {
hasDir = true;
addInputDirs(stat.getPath().toString(), job);
}
@@ -154,7 +154,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
addInputDirs(path.toString(), job);
}
} else {
- System.out.println("Add input " + inp);
+ logger.debug("Add input " + inp);
FileInputFormat.addInputPath(job, new Path(inp));
}
}
@@ -276,7 +276,7 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
public static KylinConfig loadKylinPropsAndMetadata(Configuration conf) throws IOException {
File metaDir = new File("meta");
System.setProperty(KylinConfig.KYLIN_CONF, metaDir.getAbsolutePath());
- System.out.println("The absolute path for meta dir is " + metaDir.getAbsolutePath());
+ logger.info("The absolute path for meta dir is " + metaDir.getAbsolutePath());
KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
kylinConfig.setMetadataUrl(metaDir.getCanonicalPath());
return kylinConfig;
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityJob.java
index b3f3a38..e70d3c7 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityJob.java
@@ -123,7 +123,7 @@ public class HiveColumnCardinalityJob extends AbstractHadoopJob {
} catch (Exception e) {
printUsage(options);
e.printStackTrace(System.err);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityUpdateJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityUpdateJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityUpdateJob.java
index 5399ea4..c3226ce 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityUpdateJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cardinality/HiveColumnCardinalityUpdateJob.java
@@ -91,7 +91,7 @@ public class HiveColumnCardinalityUpdateJob extends AbstractHadoopJob {
} catch (Exception e) {
printUsage(options);
e.printStackTrace(System.err);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cube/CuboidJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cube/CuboidJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cube/CuboidJob.java
index e3bc002..980d083 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cube/CuboidJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cube/CuboidJob.java
@@ -47,7 +47,7 @@ import com.kylinolap.job.hadoop.AbstractHadoopJob;
*/
public class CuboidJob extends AbstractHadoopJob {
- protected static final Logger log = LoggerFactory.getLogger(CuboidJob.class);
+ protected static final Logger logger = LoggerFactory.getLogger(CuboidJob.class);
private static final String MAPRED_REDUCE_TASKS = "mapred.reduce.tasks";
@SuppressWarnings("rawtypes")
@@ -78,7 +78,7 @@ public class CuboidJob extends AbstractHadoopJob {
CubeInstance cube = cubeMgr.getCube(cubeName);
job = Job.getInstance(getConf(), getOptionValue(OPTION_JOB_NAME));
- System.out.println("Starting: " + job.getJobName());
+ logger.info("Starting: " + job.getJobName());
FileInputFormat.setInputPaths(job, input);
File jarFile = new File(config.getKylinJobJarPath());
@@ -130,7 +130,7 @@ public class CuboidJob extends AbstractHadoopJob {
return waitForCompletion(job);
} catch (Exception e) {
printUsage(options);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
}
@@ -176,10 +176,10 @@ public class CuboidJob extends AbstractHadoopJob {
jobConf.setInt(MAPRED_REDUCE_TASKS, numReduceTasks);
- System.out.println("Having total map input MB " + Math.round(totalMapInputMB));
- System.out.println("Having level " + level + ", pre-level cuboids " + preLevelCuboids + ", this level cuboids " + thisLevelCuboids);
- System.out.println("Having per reduce MB " + perReduceInputMB + ", reduce count ratio " + reduceCountRatio);
- System.out.println("Setting " + MAPRED_REDUCE_TASKS + "=" + numReduceTasks);
+ logger.info("Having total map input MB " + Math.round(totalMapInputMB));
+ logger.info("Having level " + level + ", pre-level cuboids " + preLevelCuboids + ", this level cuboids " + thisLevelCuboids);
+ logger.info("Having per reduce MB " + perReduceInputMB + ", reduce count ratio " + reduceCountRatio);
+ logger.info("Setting " + MAPRED_REDUCE_TASKS + "=" + numReduceTasks);
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cube/MergeCuboidJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cube/MergeCuboidJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cube/MergeCuboidJob.java
index d97e135..591d751 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cube/MergeCuboidJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cube/MergeCuboidJob.java
@@ -57,7 +57,6 @@ public class MergeCuboidJob extends CuboidJob {
KylinConfig config = KylinConfig.getInstanceFromEnv();
CubeManager cubeMgr = CubeManager.getInstance(config);
CubeInstance cube = cubeMgr.getCube(cubeName);
- // CubeSegment cubeSeg = cubeMgr.findSegment(cube, segmentName);
// start job
String jobName = getOptionValue(OPTION_JOB_NAME);
@@ -72,7 +71,7 @@ public class MergeCuboidJob extends CuboidJob {
job.setJarByClass(this.getClass());
}
- // setJobJar(job);
+ // set inputs
addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cube/RowKeyDistributionCheckerJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cube/RowKeyDistributionCheckerJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cube/RowKeyDistributionCheckerJob.java
index 718c188..e9929b9 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cube/RowKeyDistributionCheckerJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cube/RowKeyDistributionCheckerJob.java
@@ -84,7 +84,7 @@ public class RowKeyDistributionCheckerJob extends AbstractHadoopJob {
return waitForCompletion(job);
} catch (Exception e) {
printUsage(options);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/cube/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/cube/StorageCleanupJob.java b/job/src/main/java/com/kylinolap/job/hadoop/cube/StorageCleanupJob.java
index b5ea4f4..bba53ab 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/cube/StorageCleanupJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/cube/StorageCleanupJob.java
@@ -101,7 +101,7 @@ public class StorageCleanupJob extends AbstractHadoopJob {
HTableDescriptor[] tableDescriptors = hbaseAdmin.listTables(tableNamePrefix + ".*");
List<String> allTablesNeedToBeDropped = new ArrayList<String>();
for (HTableDescriptor desc : tableDescriptors) {
- String host = desc.getValue(CubeManager.getHtableMetadataKey());
+ String host = desc.getValue(CubeManager.getHTableMetadataKey());
if (KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix().equalsIgnoreCase(host)) {
//only take care htables that belongs to self
allTablesNeedToBeDropped.add(desc.getTableName().getNameAsString());
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateDictionaryJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateDictionaryJob.java b/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateDictionaryJob.java
index 0ee1811..ee35785 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateDictionaryJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateDictionaryJob.java
@@ -52,7 +52,7 @@ public class CreateDictionaryJob extends AbstractHadoopJob {
} catch (Exception e) {
printUsage(options);
e.printStackTrace(System.err);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
returnCode = 2;
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java b/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
index 3f21f1e..8f86e51 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/dict/CreateInvertedIndexDictionaryJob.java
@@ -51,7 +51,7 @@ public class CreateInvertedIndexDictionaryJob extends AbstractHadoopJob {
} catch (Exception e) {
printUsage(options);
e.printStackTrace(System.err);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java b/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
index b60b5c7..7da8827 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/hbase/CreateHTableJob.java
@@ -81,7 +81,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tableName));
// https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.html
tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, ConstantSizeRegionSplitPolicy.class.getName());
- tableDesc.setValue(CubeManager.getHtableMetadataKey(), config.getMetadataUrlPrefix());
+ tableDesc.setValue(CubeManager.getHTableMetadataKey(), config.getMetadataUrlPrefix());
Configuration conf = HBaseConfiguration.create(getConf());
HBaseAdmin admin = new HBaseAdmin(conf);
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/hadoop/invertedindex/IICreateHTableJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/hadoop/invertedindex/IICreateHTableJob.java b/job/src/main/java/com/kylinolap/job/hadoop/invertedindex/IICreateHTableJob.java
index 9f5105b..44b8c24 100644
--- a/job/src/main/java/com/kylinolap/job/hadoop/invertedindex/IICreateHTableJob.java
+++ b/job/src/main/java/com/kylinolap/job/hadoop/invertedindex/IICreateHTableJob.java
@@ -92,7 +92,7 @@ public class IICreateHTableJob extends AbstractHadoopJob {
} catch (Exception e) {
printUsage(options);
e.printStackTrace(System.err);
- log.error(e.getLocalizedMessage(), e);
+ logger.error(e.getLocalizedMessage(), e);
return 2;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/acb27ea1/job/src/main/java/com/kylinolap/job/tools/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/tools/CubeMigrationCLI.java b/job/src/main/java/com/kylinolap/job/tools/CubeMigrationCLI.java
index 5a2b97a..816b2d4 100644
--- a/job/src/main/java/com/kylinolap/job/tools/CubeMigrationCLI.java
+++ b/job/src/main/java/com/kylinolap/job/tools/CubeMigrationCLI.java
@@ -263,7 +263,7 @@ public class CubeMigrationCLI {
String tableName = (String) opt.params[0];
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(tableName);
- desc.setValue(CubeManager.getHtableMetadataKey(), dstConfig.getMetadataUrlPrefix());
+ desc.setValue(CubeManager.getHTableMetadataKey(), dstConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
hbaseAdmin.enableTable(tableName);
logger.info("CHANGE_HTABLE_HOST is completed");
@@ -380,7 +380,7 @@ public class CubeMigrationCLI {
String tableName = (String) opt.params[0];
HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
hbaseAdmin.disableTable(tableName);
- desc.setValue(CubeManager.getHtableMetadataKey(), srcConfig.getMetadataUrlPrefix());
+ desc.setValue(CubeManager.getHTableMetadataKey(), srcConfig.getMetadataUrlPrefix());
hbaseAdmin.modifyTable(tableName, desc);
hbaseAdmin.enableTable(tableName);
break;