You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by sh...@apache.org on 2015/07/27 09:11:17 UTC
[1/2] incubator-kylin git commit: KYLIN-901 fix an NPE
Repository: incubator-kylin
Updated Branches:
refs/heads/0.7-staging 0391b3101 -> f9a490df0
KYLIN-901 fix an NPE
Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/f9a490df
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/f9a490df
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/f9a490df
Branch: refs/heads/0.7-staging
Commit: f9a490df005a28509e5b577945870f9f7aa3ed56
Parents: 0349479
Author: shaofengshi <sh...@apache.org>
Authored: Mon Jul 27 15:10:49 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Jul 27 15:11:10 2015 +0800
----------------------------------------------------------------------
.../job/hadoop/cube/MetadataCleanupJob.java | 44 +++++++++++---------
1 file changed, 24 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/f9a490df/job/src/main/java/org/apache/kylin/job/hadoop/cube/MetadataCleanupJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/cube/MetadataCleanupJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/cube/MetadataCleanupJob.java
index 397fe84..8b84277 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/cube/MetadataCleanupJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/cube/MetadataCleanupJob.java
@@ -107,33 +107,37 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
for (String resourceRoot : new String[]{ResourceStore.SNAPSHOT_RESOURCE_ROOT}) {
ArrayList<String> snapshotTables = getStore().listResources(resourceRoot);
- for (String snapshotTable : snapshotTables) {
- ArrayList<String> snapshotNames = getStore().listResources(snapshotTable);
- if (snapshotNames != null)
- for (String snapshot : snapshotNames) {
- if (!activeResourceList.contains(snapshot)) {
- if (isOlderThanThreshold(getStore().getResourceTimestamp(snapshot)))
- toDeleteResource.add(snapshot);
+ if (snapshotTables != null) {
+ for (String snapshotTable : snapshotTables) {
+ ArrayList<String> snapshotNames = getStore().listResources(snapshotTable);
+ if (snapshotNames != null)
+ for (String snapshot : snapshotNames) {
+ if (!activeResourceList.contains(snapshot)) {
+ if (isOlderThanThreshold(getStore().getResourceTimestamp(snapshot)))
+ toDeleteResource.add(snapshot);
+ }
}
- }
+ }
}
}
// three level resources, only dictionaries
ArrayList<String> dictTables = getStore().listResources(ResourceStore.DICT_RESOURCE_ROOT);
- for (String table : dictTables) {
- ArrayList<String> tableColNames = getStore().listResources(table);
- if (tableColNames != null)
- for (String tableCol : tableColNames) {
- ArrayList<String> dictionaries = getStore().listResources(tableCol);
- if (dictionaries != null)
- for (String dict : dictionaries)
- if (!activeResourceList.contains(dict)) {
- if (isOlderThanThreshold(getStore().getResourceTimestamp(dict)))
- toDeleteResource.add(dict);
- }
- }
+ if (dictTables != null) {
+ for (String table : dictTables) {
+ ArrayList<String> tableColNames = getStore().listResources(table);
+ if (tableColNames != null)
+ for (String tableCol : tableColNames) {
+ ArrayList<String> dictionaries = getStore().listResources(tableCol);
+ if (dictionaries != null)
+ for (String dict : dictionaries)
+ if (!activeResourceList.contains(dict)) {
+ if (isOlderThanThreshold(getStore().getResourceTimestamp(dict)))
+ toDeleteResource.add(dict);
+ }
+ }
+ }
}
[2/2] incubator-kylin git commit: KYLIN-805 drop the job dir from
hdfs after cube segment is merged;
Posted by sh...@apache.org.
KYLIN-805 drop the job dir from hdfs after cube segment is merged;
Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/03494797
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/03494797
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/03494797
Branch: refs/heads/0.7-staging
Commit: 034947979b67bf7b6ed25985c9bac591627b1443
Parents: 0391b31
Author: shaofengshi <sh...@apache.org>
Authored: Mon Jul 27 14:24:53 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Jul 27 15:11:10 2015 +0800
----------------------------------------------------------------------
.../apache/kylin/job/cube/CubingJobBuilder.java | 8 ++++++--
.../kylin/job/cube/GarbageCollectionStep.java | 18 +++++++++---------
2 files changed, 15 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/03494797/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java b/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
index 74ee876..a4a44bc 100644
--- a/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
+++ b/job/src/main/java/org/apache/kylin/job/cube/CubingJobBuilder.java
@@ -102,6 +102,7 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
List<String> mergingSegmentIds = Lists.newArrayList();
List<String> mergingCuboidPaths = Lists.newArrayList();
List<String> mergingHTables = Lists.newArrayList();
+ List<String> toDeletePaths = Lists.newArrayList();
for (CubeSegment merging : mergingSegments) {
mergingSegmentIds.add(merging.getUuid());
mergingHTables.add(merging.getStorageLocationIdentifier());
@@ -110,6 +111,7 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
} else {
mergingCuboidPaths.add(getPathToMerge(merging));
}
+ toDeletePaths.add(getJobWorkingDir(merging.getLastBuildJobID()));
}
// merge cuboid
@@ -120,7 +122,7 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
// update cube info
result.addTask(createUpdateCubeInfoAfterMergeStep(mergeSegment, mergingSegmentIds, convertCuboidToHfileStep.getId(), jobId));
- result.addTask(createGarbageCollectionStep(mergeSegment, mergingHTables, null, mergingCuboidPaths));
+ result.addTask(createGarbageCollectionStep(mergeSegment, mergingHTables, null, toDeletePaths));
return result;
}
@@ -137,10 +139,12 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
List<String> mergingSegmentIds = Lists.newArrayList();
List<String> mergingCuboidPaths = Lists.newArrayList();
List<String> mergingHTables = Lists.newArrayList();
+ List<String> toDeletePaths = Lists.newArrayList();
for (CubeSegment merging : mergingSegments) {
mergingSegmentIds.add(merging.getUuid());
mergingCuboidPaths.add(getPathToMerge(merging));
mergingHTables.add(merging.getStorageLocationIdentifier());
+ toDeletePaths.add(getJobWorkingDir(merging.getLastBuildJobID()));
}
// merge cuboid
@@ -151,7 +155,7 @@ public final class CubingJobBuilder extends AbstractJobBuilder {
// update cube info
result.addTask(createUpdateCubeInfoAfterMergeStep(seg, mergingSegmentIds, convertCuboidToHfileStep.getId(), jobId));
- result.addTask(createGarbageCollectionStep(seg, mergingHTables, null, mergingCuboidPaths));
+ result.addTask(createGarbageCollectionStep(seg, mergingHTables, null, toDeletePaths));
return result;
}
http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/03494797/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java b/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
index f95fc45..d015f51 100644
--- a/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
+++ b/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
@@ -87,7 +87,7 @@ public class GarbageCollectionStep extends AbstractExecutable {
final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
ShellCmdOutput shellCmdOutput = new ShellCmdOutput();
context.getConfig().getCliCommandExecutor().execute(dropHiveCMD, shellCmdOutput);
- output.append("Hive table " + hiveTable + " is dropped. \n");
+ output.append("Dropped Hive table " + hiveTable + " \n");
}
}
@@ -109,11 +109,11 @@ public class GarbageCollectionStep extends AbstractExecutable {
admin.disableTable(table);
}
admin.deleteTable(table);
- logger.debug("Dropped htable: " + table);
- output.append("HBase table " + table + " is dropped. \n");
+ logger.debug("Dropped HBase table " + table);
+ output.append("Dropped HBase table " + table + " \n");
} else {
- logger.debug("Skip htable: " + table);
- output.append("Skip htable: " + table + ". \n");
+ logger.debug("Skipped HBase table " + table);
+ output.append("Skipped HBase table " + table + " \n");
}
}
}
@@ -142,11 +142,11 @@ public class GarbageCollectionStep extends AbstractExecutable {
Path oldPath = new Path(path);
if (fileSystem.exists(oldPath)) {
fileSystem.delete(oldPath, true);
- logger.debug("Deleted path: " + path);
- output.append("Deleted path: " + path + " \n");
+ logger.debug("Dropped HDFS path: " + path);
+ output.append("Dropped HDFS path \"" + path + "\" \n");
} else {
- logger.debug("Path not exists: " + path);
- output.append("Path not exists: " + path + " \n");
+ logger.debug("HDFS path not exists: " + path);
+ output.append("HDFS path not exists: \"" + path + "\" \n");
}
}