Posted to commits@kylin.apache.org by sh...@apache.org on 2015/09/07 15:41:46 UTC

[2/2] incubator-kylin git commit: KYLIN-998 small update in StorageCleanupJob

KYLIN-998 small update in StorageCleanupJob

Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/277e1524
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/277e1524
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/277e1524

Branch: refs/heads/1.x-staging
Commit: 277e1524f5be92ba03447ee33010f10b8de5ca75
Parents: 7a2ef17
Author: shaofengshi <sh...@apache.org>
Authored: Mon Sep 7 21:41:35 2015 +0800
Committer: shaofengshi <sh...@apache.org>
Committed: Mon Sep 7 21:41:35 2015 +0800

----------------------------------------------------------------------
 .../job/hadoop/cube/StorageCleanupJob.java      | 51 +++++++-------------
 1 file changed, 18 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/277e1524/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
index ae684fe..0f6f9cb 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
@@ -18,13 +18,6 @@
 
 package org.apache.kylin.job.hadoop.cube;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
@@ -57,6 +50,13 @@ import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 /**
  * @author ysong1
  */
@@ -225,8 +225,6 @@ public class StorageCleanupJob extends AbstractHadoopJob {
     }
 
     private void cleanUnusedIntermediateHiveTable(Configuration conf) throws IOException {
-        FileSystem fs = FileSystem.get(conf);
-        //JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
         int uuidLength = 36;
 
         StringBuilder buf = new StringBuilder();
@@ -243,7 +241,7 @@ public class StorageCleanupJob extends AbstractHadoopJob {
             e.printStackTrace();
         }
 
-        if(output == null)
+        if (output == null)
             return;
         String outputStr = output.getOutput();
         BufferedReader reader = new BufferedReader(new StringReader(outputStr));
@@ -258,44 +256,31 @@ public class StorageCleanupJob extends AbstractHadoopJob {
 
             if (!state.isFinalState()) {
                 workingJobList.add(jobId);
-                log.info("Remove intermediate hive table with job id " + jobId + " with job status " + state);
+                log.info("Exclude intermediate hive table with job id " + jobId + " with job status " + state);
             }
         }
 
         while ((line = reader.readLine()) != null) {
-            if(line.startsWith("kylin_intermediate_")){
-                boolean isNeedDel = true;
+            if (line.startsWith("kylin_intermediate_")) {
+                boolean isNeedDel = false;
                 String uuid = line.substring(line.length() - uuidLength, line.length());
                 uuid = uuid.replace("_", "-");
                 //Check whether it's a hive table in use
-                if(workingJobList.contains(uuid)){
-                    isNeedDel = false;
+                if (allJobs.contains(uuid) && !workingJobList.contains(uuid)) {
+                    isNeedDel = true;
                 }
-                else{
-                    log.info("Hive table with uuid " + uuid + " is in use.");
-                }
-
-                //Check whether the hive table belongs to current Kylin instance
-                String hdfsPath = JobInstance.getJobWorkingDir(uuid, KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory());
-                Path p = new Path(hdfsPath);
 
-                if (fs.exists(p) == false) {
-                    isNeedDel = false;
-                }
-                else{
-                    log.info("Hive table with uuid " + uuid + " belongs to a different Kylin instance.");
-                }
-
-                if(isNeedDel)
+                if (isNeedDel) {
                     allHiveTablesNeedToBeDeleted.add(line);
+                }
             }
         }
-        
+
         if (delete == true) {
             buf.delete(0, buf.length());
             buf.append("hive -e \"");
 
-            for(String delHive : allHiveTablesNeedToBeDeleted){
+            for (String delHive : allHiveTablesNeedToBeDeleted) {
                 buf.append("drop table if exists " + delHive + "; ");
                 log.info("Remove " + delHive + " from hive tables.");
             }
@@ -315,7 +300,7 @@ public class StorageCleanupJob extends AbstractHadoopJob {
             System.out.println("----------------------------------------------------");
         }
 
-        if(reader != null)
+        if (reader != null)
             reader.close();
     }
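
----------------------------------------------------------------------

The hunk above inverts the deletion default: a kylin_intermediate_ table is now kept unless the trailing uuid in its name maps to a job this Kylin instance knows about (allJobs) and that job is no longer running (workingJobList). Below is a minimal, self-contained sketch of that selection rule, assuming the job lists are plain lists of job uuids; the isDeletable helper, class name, and sample values are illustrative, not Kylin's actual API.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class CleanupRuleSketch {

    private static final int UUID_LENGTH = 36;

    // Keep by default; delete only when the trailing 36-char uuid of the
    // table name belongs to a known job (allJobs) that is not still
    // running (workingJobs). Tables with unknown uuids are left alone,
    // which is the "default to keep" behavior this commit switches to.
    static boolean isDeletable(String tableName, List<String> allJobs, List<String> workingJobs) {
        if (!tableName.startsWith("kylin_intermediate_") || tableName.length() < UUID_LENGTH)
            return false;
        // Hive table names use underscores, job ids use dashes.
        String uuid = tableName.substring(tableName.length() - UUID_LENGTH).replace("_", "-");
        return allJobs.contains(uuid) && !workingJobs.contains(uuid);
    }

    public static void main(String[] args) {
        // Hypothetical table and job id, for illustration only.
        String table = "kylin_intermediate_test_cube_f9a2b3c4_1d2e_4f5a_8b6c_7d8e9f0a1b2c";
        List<String> allJobs = Arrays.asList("f9a2b3c4-1d2e-4f5a-8b6c-7d8e9f0a1b2c");
        List<String> workingJobs = Collections.emptyList();
        System.out.println(isDeletable(table, allJobs, workingJobs)); // true: finished job, safe to drop
    }
}

Compared with the previous "delete unless proven in use" logic, this reading also drops the per-table HDFS working-directory probe, so a table whose uuid is simply unknown to the instance is no longer a deletion candidate.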