Posted to commits@kylin.apache.org by ma...@apache.org on 2016/07/14 06:49:41 UTC

kylin git commit: code refactor: make all tool CLIs subclasses of AbstractApplication

Repository: kylin
Updated Branches:
  refs/heads/master 49a0a41f0 -> bb1cbda97


code refactor: make all tool CLIs subclasses of AbstractApplication


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/bb1cbda9
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/bb1cbda9
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/bb1cbda9

Branch: refs/heads/master
Commit: bb1cbda97ba9278e3ee77f1df593cc533b89dc75
Parents: 49a0a41
Author: Hongbin Ma <ma...@apache.org>
Authored: Thu Jul 14 14:49:03 2016 +0800
Committer: Hongbin Ma <ma...@apache.org>
Committed: Thu Jul 14 14:49:03 2016 +0800

----------------------------------------------------------------------
 .../kylin/job/execution/AbstractExecutable.java |  2 +-
 .../engine/mr/common/AbstractHadoopJob.java     |  1 -
 .../mr/steps/RowKeyDistributionCheckerJob.java  |  6 +-
 .../kylin/provision/BuildCubeWithEngine.java    |  6 +-
 .../kylin/provision/BuildCubeWithStream.java    |  6 +-
 .../apache/kylin/rest/service/AdminService.java |  3 +-
 .../storage/hbase/util/CleanHtableCLI.java      | 35 +++++------
 .../kylin/storage/hbase/util/HBaseClean.java    | 53 +++++++---------
 .../hbase/util/HtableAlterMetadataCLI.java      | 53 ++++++++--------
 .../storage/hbase/util/OrphanHBaseCleanJob.java | 66 +++++++++-----------
 .../storage/hbase/util/StorageCleanupJob.java   | 65 ++++++++-----------
 11 files changed, 131 insertions(+), 165 deletions(-)
----------------------------------------------------------------------
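
Before the per-file hunks, a quick orientation: the refactor drops the Hadoop Tool/ToolRunner plumbing from these utility CLIs and moves them onto Kylin's AbstractApplication. Each tool now declares its command-line options in getOptions() and does its actual work in execute(OptionsHelper); main() simply constructs the tool and calls execute(args). The sketch below is not part of the commit -- it is a minimal hypothetical tool written against the contract visible in the hunks that follow, with made-up class and option names, showing the shape every converted CLI now shares.

    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;
    import org.apache.kylin.common.util.AbstractApplication;
    import org.apache.kylin.common.util.OptionsHelper;

    // Hypothetical example only -- not part of this commit.
    public class ExampleCleanupCLI extends AbstractApplication {

        @SuppressWarnings("static-access")
        private static final Option OPTION_DRY_RUN = OptionBuilder.withArgName("dryRun").hasArg().isRequired(false).withDescription("print actions without executing them").create("dryRun");

        private boolean dryRun = true;

        @Override
        protected Options getOptions() {
            // Declare the options this tool accepts, exactly as the converted CLIs below do.
            Options options = new Options();
            options.addOption(OPTION_DRY_RUN);
            return options;
        }

        @Override
        protected void execute(OptionsHelper optionsHelper) throws Exception {
            // Parsed values are read back through OptionsHelper.
            String value = optionsHelper.getOptionValue(OPTION_DRY_RUN);
            if (value != null) {
                dryRun = Boolean.parseBoolean(value);
            }
            // In a real tool the work happens here; the sketch just reports the parsed flag.
            System.out.println("dryRun=" + dryRun);
        }

        public static void main(String[] args) throws Exception {
            // No ToolRunner and no explicit exit code: construct the tool and call execute(args).
            new ExampleCleanupCLI().execute(args);
        }
    }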


http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
----------------------------------------------------------------------
diff --git a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
index ab70007..4dedad1 100644
--- a/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
+++ b/core-job/src/main/java/org/apache/kylin/job/execution/AbstractExecutable.java
@@ -95,7 +95,7 @@ public abstract class AbstractExecutable implements Executable, Idempotent {
     @Override
     public final ExecuteResult execute(ExecutableContext executableContext) throws ExecuteException {
 
-        logger.info("Executing >>>>>>>>>>>>>   " + this.getName() + "   <<<<<<<<<<<<<");
+        logger.info("Executing AbstractExecutable (" + this.getName() + ")");
 
         Preconditions.checkArgument(executableContext instanceof DefaultContext);
         ExecuteResult result = null;

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
index bafe65b..b483dc1 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/common/AbstractHadoopJob.java
@@ -77,7 +77,6 @@ public abstract class AbstractHadoopJob extends Configured implements Tool {
     protected static final Option OPTION_JOB_NAME = OptionBuilder.withArgName(BatchConstants.ARG_JOB_NAME).hasArg().isRequired(true).withDescription("Job name. For example, Kylin_Cuboid_Builder-clsfd_v2_Step_22-D)").create(BatchConstants.ARG_JOB_NAME);
     protected static final Option OPTION_CUBE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_CUBE_NAME).hasArg().isRequired(true).withDescription("Cube name. For exmaple, flat_item_cube").create(BatchConstants.ARG_CUBE_NAME);
     protected static final Option OPTION_CUBING_JOB_ID = OptionBuilder.withArgName(BatchConstants.ARG_CUBING_JOB_ID).hasArg().isRequired(false).withDescription("ID of cubing job executable").create(BatchConstants.ARG_CUBING_JOB_ID);
-    protected static final Option OPTION_II_NAME = OptionBuilder.withArgName(BatchConstants.ARG_II_NAME).hasArg().isRequired(true).withDescription("II name. For exmaple, some_ii").create(BatchConstants.ARG_II_NAME);
     protected static final Option OPTION_SEGMENT_NAME = OptionBuilder.withArgName(BatchConstants.ARG_SEGMENT_NAME).hasArg().isRequired(true).withDescription("Cube segment name").create(BatchConstants.ARG_SEGMENT_NAME);
     protected static final Option OPTION_INPUT_PATH = OptionBuilder.withArgName(BatchConstants.ARG_INPUT).hasArg().isRequired(true).withDescription("Input path").create(BatchConstants.ARG_INPUT);
     protected static final Option OPTION_INPUT_FORMAT = OptionBuilder.withArgName(BatchConstants.ARG_INPUT_FORMAT).hasArg().isRequired(false).withDescription("Input format").create(BatchConstants.ARG_INPUT_FORMAT);

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
index c5a0ff2..fc8d3e7 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
@@ -39,7 +39,7 @@ import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 public class RowKeyDistributionCheckerJob extends AbstractHadoopJob {
 
     @SuppressWarnings("static-access")
-    protected static final Option rowKeyStatsFilePath = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("rowKeyStatsFilePath").create("rowKeyStatsFilePath");
+    protected static final Option ROW_KEY_STATS_FILE_PATH = OptionBuilder.withArgName("path").hasArg().isRequired(true).withDescription("rowKeyStatsFilePath").create("rowKeyStatsFilePath");
 
     @Override
     public int run(String[] args) throws Exception {
@@ -49,11 +49,11 @@ public class RowKeyDistributionCheckerJob extends AbstractHadoopJob {
             options.addOption(OPTION_INPUT_PATH);
             options.addOption(OPTION_OUTPUT_PATH);
             options.addOption(OPTION_JOB_NAME);
-            options.addOption(rowKeyStatsFilePath);
+            options.addOption(ROW_KEY_STATS_FILE_PATH);
 
             parseOptions(options, args);
 
-            String statsFilePath = getOptionValue(rowKeyStatsFilePath);
+            String statsFilePath = getOptionValue(ROW_KEY_STATS_FILE_PATH);
 
             // start job
             String jobName = getOptionValue(OPTION_JOB_NAME);

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
index 9ce2bf2..3d60a3c 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithEngine.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.HBaseMetadataTestCase;
@@ -414,8 +413,9 @@ public class BuildCubeWithEngine {
     private int cleanupOldStorage() throws Exception {
         String[] args = { "--delete", "true" };
 
-        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
-        return exitCode;
+        StorageCleanupJob cli = new StorageCleanupJob();
+        cli.execute(args);
+        return 0;
     }
 
     private void checkHFilesInHBase(CubeSegment segment) throws IOException {
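
One side effect worth noting in this caller: the new execute(args) call no longer yields an exit code, so cleanupOldStorage() now returns 0 unconditionally and failures are expected to surface as exceptions (the AdminService hunk below wraps the same call in try/catch for that reason). If a test ever wants the old exit-code behaviour back, a hypothetical helper like the one below -- assuming execute() signals failure by throwing, and reusing the enclosing class's existing StorageCleanupJob import -- would restore it; it is a sketch, not part of the commit.

    // Hypothetical drop-in alternative for cleanupOldStorage(), not in the commit.
    private static int cleanupOldStorageWithExitCode() {
        String[] args = { "--delete", "true" };
        try {
            new StorageCleanupJob().execute(args);
            return 0;
        } catch (Exception e) {
            // Map a failure back onto the old ToolRunner-style non-zero exit code.
            return 1;
        }
    }
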

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index 1655a17..d41dc34 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.UUID;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.DateFormat;
@@ -111,8 +110,9 @@ public class BuildCubeWithStream {
     private static int cleanupOldStorage() throws Exception {
         String[] args = { "--delete", "true" };
 
-        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
-        return exitCode;
+        StorageCleanupJob cli = new StorageCleanupJob();
+        cli.execute(args);
+        return 0;
     }
 
     public void build() throws Exception {

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
----------------------------------------------------------------------
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
index 6c85898..b92c305 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/AdminService.java
@@ -25,7 +25,6 @@ import java.util.Properties;
 
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
@@ -101,7 +100,7 @@ public class AdminService extends BasicService {
         StorageCleanupJob job = new StorageCleanupJob();
         String[] args = new String[] { "-delete", "true" };
         try {
-            ToolRunner.run(job, args);
+            job.execute(args);
         } catch (Exception e) {
             throw new InternalErrorException(e.getMessage(), e);
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 9e30a06..a150607 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -24,8 +24,8 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -33,24 +33,10 @@ import org.slf4j.LoggerFactory;
 
 /**
  */
-public class CleanHtableCLI extends AbstractHadoopJob {
+public class CleanHtableCLI extends AbstractApplication {
 
     protected static final Logger logger = LoggerFactory.getLogger(CleanHtableCLI.class);
 
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-        try {
-
-            clean();
-
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
     private void clean() throws IOException {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
         HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
@@ -71,7 +57,18 @@ public class CleanHtableCLI extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new CleanHtableCLI(), args);
-        System.exit(exitCode);
+        CleanHtableCLI cli = new CleanHtableCLI();
+        cli.execute(args);
+    }
+
+    @Override
+    protected Options getOptions() {
+        Options options = new Options();
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        clean();
     }
 }
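
CleanHtableCLI is the simplest conversion: it takes no command-line arguments, so getOptions() returns an empty Options and execute(OptionsHelper) ignores its parameter. A hypothetical caller, equivalent to what the new main() above does, is sketched below (driver class name is made up).

    import org.apache.kylin.storage.hbase.util.CleanHtableCLI;

    // Hypothetical driver, not in the commit.
    public class CleanHtableExample {
        public static void main(String[] args) throws Exception {
            // The tool declares no options, so an empty argument array is sufficient.
            new CleanHtableCLI().execute(new String[0]);
        }
    }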

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index b6958d6..6749d6c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -19,7 +19,6 @@
 package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.cli.Option;
@@ -28,8 +27,8 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
@@ -40,7 +39,7 @@ import com.google.common.collect.Lists;
 /**
  * clean hbase tables by tag
  */
-public class HBaseClean extends AbstractHadoopJob {
+public class HBaseClean extends AbstractApplication {
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(true).withDescription("actually delete or not").create("delete");
@@ -49,33 +48,10 @@ public class HBaseClean extends AbstractHadoopJob {
     private static final Option OPTION_TAG = OptionBuilder.withArgName("tag").hasArg().isRequired(true).withDescription("the tag of HTable").create("tag");
 
     protected static final Logger logger = LoggerFactory.getLogger(HBaseClean.class);
+
     boolean delete = false;
     String tag = null;
 
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        logger.info("jobs args: " + Arrays.toString(args));
-        try {
-            options.addOption(OPTION_DELETE);
-            options.addOption(OPTION_TAG);
-            parseOptions(options, args);
-
-            logger.info("options: '" + getOptionsAsString() + "'");
-
-            tag = getOptionValue(OPTION_TAG);
-            delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
-
-            cleanUp();
-
-            return 0;
-        } catch (Exception e) {
-            e.printStackTrace(System.err);
-            throw e;
-        }
-    }
-
     private void cleanUp() {
         try {
             // get all kylin hbase tables
@@ -121,7 +97,24 @@ public class HBaseClean extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new HBaseClean(), args);
-        System.exit(exitCode);
+        HBaseClean cli = new HBaseClean();
+        cli.execute(args);
+    }
+
+    @Override
+    protected Options getOptions() {
+        Options options = new Options();
+        options.addOption(OPTION_DELETE);
+        options.addOption(OPTION_TAG);
+        return options;
+
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        tag = optionsHelper.getOptionValue(OPTION_TAG);
+        delete = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_DELETE));
+
+        cleanUp();
     }
 }
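
HBaseClean keeps both of its required options; only the wiring around them changes. The sketch below shows a hypothetical invocation of the refactored tool -- the option names come from the OPTION_TAG/OPTION_DELETE definitions above, while the tag value and class name are placeholders.

    import org.apache.kylin.storage.hbase.util.HBaseClean;

    // Hypothetical usage, not in the commit.
    public class HBaseCleanExample {
        public static void main(String[] args) throws Exception {
            HBaseClean cli = new HBaseClean();
            // "-delete false" leaves the "actually delete or not" switch off;
            // "SOME_TAG" stands in for a real HTable tag.
            cli.execute(new String[] { "-tag", "SOME_TAG", "-delete", "false" });
        }
    }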

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index 4db183b..ca1a060 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -27,8 +27,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,10 +37,11 @@ import org.slf4j.LoggerFactory;
 /**
  */
 @SuppressWarnings("static-access")
-public class HtableAlterMetadataCLI extends AbstractHadoopJob {
+public class HtableAlterMetadataCLI extends AbstractApplication {
 
     private static final Option OPTION_METADATA_KEY = OptionBuilder.withArgName("key").hasArg().isRequired(true).withDescription("The metadata key").create("key");
     private static final Option OPTION_METADATA_VALUE = OptionBuilder.withArgName("value").hasArg().isRequired(true).withDescription("The metadata value").create("value");
+    protected static final Option OPTION_HTABLE_NAME = OptionBuilder.withArgName(BatchConstants.ARG_HTABLE_NAME).hasArg().isRequired(true).withDescription("HTable name").create(BatchConstants.ARG_HTABLE_NAME);
 
     protected static final Logger logger = LoggerFactory.getLogger(HtableAlterMetadataCLI.class);
 
@@ -47,28 +49,6 @@ public class HtableAlterMetadataCLI extends AbstractHadoopJob {
     String metadataKey;
     String metadataValue;
 
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-        try {
-            options.addOption(OPTION_HTABLE_NAME);
-            options.addOption(OPTION_METADATA_KEY);
-            options.addOption(OPTION_METADATA_VALUE);
-
-            parseOptions(options, args);
-            tableName = getOptionValue(OPTION_HTABLE_NAME);
-            metadataKey = getOptionValue(OPTION_METADATA_KEY);
-            metadataValue = getOptionValue(OPTION_METADATA_VALUE);
-
-            alter();
-
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
     private void alter() throws IOException {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
         HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
@@ -82,7 +62,26 @@ public class HtableAlterMetadataCLI extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new HtableAlterMetadataCLI(), args);
-        System.exit(exitCode);
+        HtableAlterMetadataCLI cli = new HtableAlterMetadataCLI();
+        cli.execute(args);
+    }
+
+    @Override
+    protected Options getOptions() {
+        Options options = new Options();
+        options.addOption(OPTION_HTABLE_NAME);
+        options.addOption(OPTION_METADATA_KEY);
+        options.addOption(OPTION_METADATA_VALUE);
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        tableName = optionsHelper.getOptionValue(OPTION_HTABLE_NAME);
+        metadataKey = optionsHelper.getOptionValue(OPTION_METADATA_KEY);
+        metadataValue = optionsHelper.getOptionValue(OPTION_METADATA_VALUE);
+
+        alter();
+
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
index f0618c9..8ff5b0f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/OrphanHBaseCleanJob.java
@@ -20,7 +20,6 @@ package org.apache.kylin.storage.hbase.util;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
@@ -32,15 +31,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
+import org.apache.kylin.common.util.AbstractApplication;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  */
-public class OrphanHBaseCleanJob extends AbstractHadoopJob {
+public class OrphanHBaseCleanJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
@@ -52,37 +51,6 @@ public class OrphanHBaseCleanJob extends AbstractHadoopJob {
     boolean delete = false;
     Set<String> metastoreWhitelistSet = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
 
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        logger.info("jobs args: " + Arrays.toString(args));
-        try {
-            options.addOption(OPTION_DELETE);
-            options.addOption(OPTION_WHITELIST);
-            parseOptions(options, args);
-
-            logger.info("options: '" + getOptionsAsString() + "'");
-            logger.info("delete option value: '" + getOptionValue(OPTION_DELETE) + "'");
-            delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
-            String[] metastoreWhitelist = getOptionValue(OPTION_WHITELIST).split(",");
-
-            for (String ms : metastoreWhitelist) {
-                logger.info("metadata store in white list: " + ms);
-                metastoreWhitelistSet.add(ms);
-            }
-
-            Configuration conf = HBaseConfiguration.create(getConf());
-
-            cleanUnusedHBaseTables(conf);
-
-            return 0;
-        } catch (Exception e) {
-            e.printStackTrace(System.err);
-            throw e;
-        }
-    }
-
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
 
         // get all kylin hbase tables
@@ -128,7 +96,31 @@ public class OrphanHBaseCleanJob extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new OrphanHBaseCleanJob(), args);
-        System.exit(exitCode);
+        OrphanHBaseCleanJob job = new OrphanHBaseCleanJob();
+        job.execute(args);
+    }
+
+    @Override
+    protected Options getOptions() {
+        Options options = new Options();
+        options.addOption(OPTION_DELETE);
+        options.addOption(OPTION_WHITELIST);
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        logger.info("options: '" + optionsHelper.getOptionsAsString() + "'");
+        logger.info("delete option value: '" + optionsHelper.getOptionValue(OPTION_DELETE) + "'");
+        delete = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_DELETE));
+        String[] metastoreWhitelist = optionsHelper.getOptionValue(OPTION_WHITELIST).split(",");
+
+        for (String ms : metastoreWhitelist) {
+            logger.info("metadata store in white list: " + ms);
+            metastoreWhitelistSet.add(ms);
+        }
+
+        Configuration conf = HBaseConfiguration.create();
+        cleanUnusedHBaseTables(conf);
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/bb1cbda9/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index 3f4a6d5..214375a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -22,7 +22,6 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutorService;
@@ -42,14 +41,14 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.AbstractApplication;
 import org.apache.kylin.common.util.CliCommandExecutor;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.execution.ExecutableState;
@@ -58,50 +57,17 @@ import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class StorageCleanupJob extends AbstractHadoopJob {
+public class StorageCleanupJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
     private static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
 
     protected static final Logger logger = LoggerFactory.getLogger(StorageCleanupJob.class);
-
     public static final int TIME_THRESHOLD_DELETE_HTABLE = 10; // Unit minute
 
     boolean delete = false;
-
     protected static ExecutableManager executableManager = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
 
-    /*
-     * (non-Javadoc)
-     * 
-     * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
-     */
-    @Override
-    public int run(String[] args) throws Exception {
-        Options options = new Options();
-
-        logger.info("jobs args: " + Arrays.toString(args));
-        try {
-            options.addOption(OPTION_DELETE);
-            parseOptions(options, args);
-
-            logger.info("options: '" + getOptionsAsString() + "'");
-            logger.info("delete option value: '" + getOptionValue(OPTION_DELETE) + "'");
-            delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
-
-            Configuration conf = HBaseConfiguration.create(getConf());
-
-            cleanUnusedIntermediateHiveTable(conf);
-            cleanUnusedHdfsFiles(conf);
-            cleanUnusedHBaseTables(conf);
-
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
-    }
-
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
         long TIME_THREADSHOLD = KylinConfig.getInstanceFromEnv().getStorageCleanupTimeThreshold();
@@ -162,6 +128,27 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         hbaseAdmin.close();
     }
 
+    @Override
+    protected Options getOptions() {
+        Options options = new Options();
+        options.addOption(OPTION_DELETE);
+        return options;
+    }
+
+    @Override
+    protected void execute(OptionsHelper optionsHelper) throws Exception {
+        logger.info("options: '" + optionsHelper.getOptionsAsString() + "'");
+        logger.info("delete option value: '" + optionsHelper.getOptionValue(OPTION_DELETE) + "'");
+        delete = Boolean.parseBoolean(optionsHelper.getOptionValue(OPTION_DELETE));
+
+        Configuration conf = HBaseConfiguration.create();
+
+        cleanUnusedIntermediateHiveTable(conf);
+        cleanUnusedHdfsFiles(conf);
+        cleanUnusedHBaseTables(conf);
+
+    }
+
     class DeleteHTableRunnable implements Callable {
         HBaseAdmin hbaseAdmin;
         String htableName;
@@ -322,7 +309,7 @@ public class StorageCleanupJob extends AbstractHadoopJob {
     }
 
     public static void main(String[] args) throws Exception {
-        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
-        System.exit(exitCode);
+        StorageCleanupJob cli = new StorageCleanupJob();
+        cli.execute(args);
     }
 }