Posted to commits@kylin.apache.org by ma...@apache.org on 2016/10/08 02:26:03 UTC

[41/50] [abbrv] kylin git commit: minor, clean error handling on Job classes

minor, clean error handling on Job classes


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/e22039d1
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/e22039d1
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/e22039d1

Branch: refs/heads/orderedbytes
Commit: e22039d1344f6bb768df62f73737066a7f10838a
Parents: 92fc0e5
Author: Yang Li <li...@apache.org>
Authored: Fri Sep 30 22:05:12 2016 +0800
Committer: Yang Li <li...@apache.org>
Committed: Fri Sep 30 22:05:59 2016 +0800

----------------------------------------------------------------------
 .../engine/mr/steps/CreateDictionaryJob.java    | 34 ++++-----
 .../apache/kylin/engine/mr/steps/CuboidJob.java |  4 --
 .../engine/mr/steps/FactDistinctColumnsJob.java |  4 --
 .../kylin/engine/mr/steps/InMemCuboidJob.java   |  4 --
 .../kylin/engine/mr/steps/MergeCuboidJob.java   |  4 --
 .../engine/mr/steps/MetadataCleanupJob.java     | 21 +++---
 .../mr/steps/RowKeyDistributionCheckerJob.java  | 59 +++++++--------
 .../cardinality/HiveColumnCardinalityJob.java   | 76 +++++++++-----------
 .../kylin/storage/hbase/steps/BulkLoadJob.java  | 54 +++++++-------
 .../storage/hbase/steps/CreateHTableJob.java    | 27 +++----
 .../kylin/storage/hbase/steps/CubeHFileJob.java |  4 --
 11 files changed, 118 insertions(+), 173 deletions(-)
----------------------------------------------------------------------
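
All eleven files get the same treatment: each run() method used to wrap its entire body in a catch-all whose only job-specific work was printing usage before rethrowing. Since argument errors are presumably already reported where they are detected (inside the parseOptions() helper of the AbstractHadoopJob base class), the per-subclass catch added nothing, and removing it lets failures propagate unchanged. A minimal sketch of the before/after shape (MyJob is hypothetical; printUsage stands in for the AbstractHadoopJob helper of the same name):

    import org.apache.commons.cli.Options;

    public class MyJob {

        // Before: a blanket catch, duplicated in every subclass.
        public int runOld(String[] args) throws Exception {
            Options options = new Options();
            try {
                // addOption(...), parseOptions(options, args),
                // configure the Hadoop job, then submit it
                return 0;
            } catch (Exception e) {
                printUsage(options);   // the only subclass-specific work
                throw e;
            }
        }

        // After: the happy path reads straight through; any failure
        // propagates to the caller (typically ToolRunner).
        public int runNew(String[] args) throws Exception {
            Options options = new Options();
            // addOption(...), parseOptions(options, args),
            // configure the Hadoop job, then submit it
            return 0;
        }

        private void printUsage(Options options) {
            // stand-in for AbstractHadoopJob.printUsage()
        }
    }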


http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
index 69c0095..5db7c88 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CreateDictionaryJob.java
@@ -40,29 +40,23 @@ public class CreateDictionaryJob extends AbstractHadoopJob {
     @Override
     public int run(String[] args) throws Exception {
         Options options = new Options();
+        options.addOption(OPTION_CUBE_NAME);
+        options.addOption(OPTION_SEGMENT_ID);
+        options.addOption(OPTION_INPUT_PATH);
+        parseOptions(options, args);
 
-        try {
-            options.addOption(OPTION_CUBE_NAME);
-            options.addOption(OPTION_SEGMENT_ID);
-            options.addOption(OPTION_INPUT_PATH);
-            parseOptions(options, args);
+        final String cubeName = getOptionValue(OPTION_CUBE_NAME);
+        final String segmentID = getOptionValue(OPTION_SEGMENT_ID);
+        final String factColumnsInputPath = getOptionValue(OPTION_INPUT_PATH);
 
-            final String cubeName = getOptionValue(OPTION_CUBE_NAME);
-            final String segmentID = getOptionValue(OPTION_SEGMENT_ID);
-            final String factColumnsInputPath = getOptionValue(OPTION_INPUT_PATH);
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
 
-            KylinConfig config = KylinConfig.getInstanceFromEnv();
-
-            DictionaryGeneratorCLI.processSegment(config, cubeName, segmentID, new DistinctColumnValuesProvider() {
-                @Override
-                public ReadableTable getDistinctValuesFor(TblColRef col) {
-                    return new DFSFileTable(factColumnsInputPath + "/" + col.getName(), -1);
-                }
-            });
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
+        DictionaryGeneratorCLI.processSegment(config, cubeName, segmentID, new DistinctColumnValuesProvider() {
+            @Override
+            public ReadableTable getDistinctValuesFor(TblColRef col) {
+                return new DFSFileTable(factColumnsInputPath + "/" + col.getName(), -1);
+            }
+        });
 
         return returnCode;
     }
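
Since the anonymous class above implements a single method, the same callback could be written as a lambda on Java 8. A sketch, assuming getDistinctValuesFor is the only abstract method of DistinctColumnValuesProvider (the anonymous-class form presumably remains because the code base still targeted Java 7 at the time):

    DictionaryGeneratorCLI.processSegment(config, cubeName, segmentID,
            col -> new DFSFileTable(factColumnsInputPath + "/" + col.getName(), -1));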

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
index 0399300..6b0c86e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/CuboidJob.java
@@ -140,10 +140,6 @@ public class CuboidJob extends AbstractHadoopJob {
             this.deletePath(job.getConfiguration(), output);
 
             return waitForCompletion(job);
-        } catch (Exception e) {
-            logger.error("error in CuboidJob", e);
-            printUsage(options);
-            throw e;
         } finally {
             if (job != null)
                 cleanupTempConfFile(job.getConfiguration());
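
What survives in CuboidJob.run(), and likewise in FactDistinctColumnsJob, InMemCuboidJob, MergeCuboidJob, and CubeHFileJob below, is a bare try/finally: the catch is gone, but temp-conf cleanup is still guaranteed on both success and failure. A minimal sketch of that shape (the abstract methods stand in for the AbstractHadoopJob helpers of the same names):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    abstract class JobWithCleanup {

        int run(Job job) throws Exception {
            try {
                // configure the job, delete stale output, then submit
                return waitForCompletion(job);
            } finally {
                // runs whether waitForCompletion returns or throws
                if (job != null)
                    cleanupTempConfFile(job.getConfiguration());
            }
        }

        abstract int waitForCompletion(Job job) throws Exception;

        abstract void cleanupTempConfFile(Configuration conf);
    }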

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
index a9cc17f..6603728 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
@@ -107,10 +107,6 @@ public class FactDistinctColumnsJob extends AbstractHadoopJob {
 
             return waitForCompletion(job);
 
-        } catch (Exception e) {
-            logger.error("error in FactDistinctColumnsJob", e);
-            printUsage(options);
-            throw e;
         } finally {
             if (job != null)
                 cleanupTempConfFile(job.getConfiguration());

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
index f6ed8e7..013f2c9 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/InMemCuboidJob.java
@@ -130,10 +130,6 @@ public class InMemCuboidJob extends AbstractHadoopJob {
             HadoopUtil.deletePath(job.getConfiguration(), outputPath);
 
             return waitForCompletion(job);
-        } catch (Exception e) {
-            logger.error("error in CuboidJob", e);
-            printUsage(options);
-            throw e;
         } finally {
             if (job != null)
                 cleanupTempConfFile(job.getConfiguration());

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
index e0ae74d..810da23 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MergeCuboidJob.java
@@ -86,10 +86,6 @@ public class MergeCuboidJob extends CuboidJob {
             this.deletePath(job.getConfiguration(), output);
 
             return waitForCompletion(job);
-        } catch (Exception e) {
-            logger.error("error in MergeCuboidJob", e);
-            printUsage(options);
-            throw e;
         } finally {
             if (job != null)
                 cleanupTempConfFile(job.getConfiguration());

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
index f3ab310..962697e 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/MetadataCleanupJob.java
@@ -67,23 +67,18 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
         Options options = new Options();
 
         logger.info("jobs args: " + Arrays.toString(args));
-        try {
-            options.addOption(OPTION_DELETE);
-            parseOptions(options, args);
+        options.addOption(OPTION_DELETE);
+        parseOptions(options, args);
 
-            logger.info("options: '" + getOptionsAsString() + "'");
-            logger.info("delete option value: '" + getOptionValue(OPTION_DELETE) + "'");
-            delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
+        logger.info("options: '" + getOptionsAsString() + "'");
+        logger.info("delete option value: '" + getOptionValue(OPTION_DELETE) + "'");
+        delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
 
-            config = KylinConfig.getInstanceFromEnv();
+        config = KylinConfig.getInstanceFromEnv();
 
-            cleanup();
+        cleanup();
 
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
+        return 0;
     }
 
     private ResourceStore getStore() {
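
A side note on why no catch is needed here: Boolean.parseBoolean never throws, so parsing the delete flag cannot fail. It returns true only for a case-insensitive "true" and false for everything else, including null:

    Boolean.parseBoolean("TRUE");   // true
    Boolean.parseBoolean("yes");    // false
    Boolean.parseBoolean(null);     // false, no NullPointerException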

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
----------------------------------------------------------------------
diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
index fc8d3e7..3419949 100644
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/RowKeyDistributionCheckerJob.java
@@ -45,49 +45,44 @@ public class RowKeyDistributionCheckerJob extends AbstractHadoopJob {
     public int run(String[] args) throws Exception {
         Options options = new Options();
 
-        try {
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_OUTPUT_PATH);
-            options.addOption(OPTION_JOB_NAME);
-            options.addOption(ROW_KEY_STATS_FILE_PATH);
+        options.addOption(OPTION_INPUT_PATH);
+        options.addOption(OPTION_OUTPUT_PATH);
+        options.addOption(OPTION_JOB_NAME);
+        options.addOption(ROW_KEY_STATS_FILE_PATH);
 
-            parseOptions(options, args);
+        parseOptions(options, args);
 
-            String statsFilePath = getOptionValue(ROW_KEY_STATS_FILE_PATH);
+        String statsFilePath = getOptionValue(ROW_KEY_STATS_FILE_PATH);
 
-            // start job
-            String jobName = getOptionValue(OPTION_JOB_NAME);
-            job = Job.getInstance(getConf(), jobName);
+        // start job
+        String jobName = getOptionValue(OPTION_JOB_NAME);
+        job = Job.getInstance(getConf(), jobName);
 
-            setJobClasspath(job, KylinConfig.getInstanceFromEnv());
+        setJobClasspath(job, KylinConfig.getInstanceFromEnv());
 
-            addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
+        addInputDirs(getOptionValue(OPTION_INPUT_PATH), job);
 
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            FileOutputFormat.setOutputPath(job, output);
+        Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+        FileOutputFormat.setOutputPath(job, output);
 
-            // Mapper
-            job.setInputFormatClass(SequenceFileInputFormat.class);
-            job.setMapperClass(RowKeyDistributionCheckerMapper.class);
-            job.setMapOutputKeyClass(Text.class);
-            job.setMapOutputValueClass(LongWritable.class);
+        // Mapper
+        job.setInputFormatClass(SequenceFileInputFormat.class);
+        job.setMapperClass(RowKeyDistributionCheckerMapper.class);
+        job.setMapOutputKeyClass(Text.class);
+        job.setMapOutputValueClass(LongWritable.class);
 
-            // Reducer - only one
-            job.setReducerClass(RowKeyDistributionCheckerReducer.class);
-            job.setOutputFormatClass(SequenceFileOutputFormat.class);
-            job.setOutputKeyClass(Text.class);
-            job.setOutputValueClass(LongWritable.class);
-            job.setNumReduceTasks(1);
+        // Reducer - only one
+        job.setReducerClass(RowKeyDistributionCheckerReducer.class);
+        job.setOutputFormatClass(SequenceFileOutputFormat.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(LongWritable.class);
+        job.setNumReduceTasks(1);
 
-            job.getConfiguration().set("rowKeyStatsFilePath", statsFilePath);
+        job.getConfiguration().set("rowKeyStatsFilePath", statsFilePath);
 
-            this.deletePath(job.getConfiguration(), output);
+        this.deletePath(job.getConfiguration(), output);
 
-            return waitForCompletion(job);
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
+        return waitForCompletion(job);
     }
 
     public static void main(String[] args) throws Exception {
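
RowKeyDistributionCheckerJob had no finally block to keep, so the try statement is removed outright and the whole method body shifts left one indent level; most of the large hunk above is that re-indentation.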

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
index d03350e..c7d694f 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/HiveColumnCardinalityJob.java
@@ -59,61 +59,55 @@ public class HiveColumnCardinalityJob extends AbstractHadoopJob {
 
         Options options = new Options();
 
-        try {
-            options.addOption(OPTION_TABLE);
-            options.addOption(OPTION_OUTPUT_PATH);
+        options.addOption(OPTION_TABLE);
+        options.addOption(OPTION_OUTPUT_PATH);
 
-            parseOptions(options, args);
+        parseOptions(options, args);
 
-            // start job
-            String jobName = JOB_TITLE + getOptionsAsString();
-            logger.info("Starting: " + jobName);
-            Configuration conf = getConf();
+        // start job
+        String jobName = JOB_TITLE + getOptionsAsString();
+        logger.info("Starting: " + jobName);
+        Configuration conf = getConf();
 
-            KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-            JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
-            conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
+        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
+        conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
 
-            job = Job.getInstance(conf, jobName);
+        job = Job.getInstance(conf, jobName);
 
-            setJobClasspath(job, kylinConfig);
+        setJobClasspath(job, kylinConfig);
 
-            String table = getOptionValue(OPTION_TABLE);
-            job.getConfiguration().set(BatchConstants.CFG_TABLE_NAME, table);
+        String table = getOptionValue(OPTION_TABLE);
+        job.getConfiguration().set(BatchConstants.CFG_TABLE_NAME, table);
 
-            Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
-            FileOutputFormat.setOutputPath(job, output);
-            job.getConfiguration().set("dfs.block.size", "67108864");
+        Path output = new Path(getOptionValue(OPTION_OUTPUT_PATH));
+        FileOutputFormat.setOutputPath(job, output);
+        job.getConfiguration().set("dfs.block.size", "67108864");
 
-            // Mapper
-            IMRTableInputFormat tableInputFormat = MRUtil.getTableInputFormat(table);
-            tableInputFormat.configureJob(job);
+        // Mapper
+        IMRTableInputFormat tableInputFormat = MRUtil.getTableInputFormat(table);
+        tableInputFormat.configureJob(job);
 
-            job.setMapperClass(ColumnCardinalityMapper.class);
-            job.setMapOutputKeyClass(IntWritable.class);
-            job.setMapOutputValueClass(BytesWritable.class);
+        job.setMapperClass(ColumnCardinalityMapper.class);
+        job.setMapOutputKeyClass(IntWritable.class);
+        job.setMapOutputValueClass(BytesWritable.class);
 
-            // Reducer - only one
-            job.setReducerClass(ColumnCardinalityReducer.class);
-            job.setOutputFormatClass(TextOutputFormat.class);
-            job.setOutputKeyClass(IntWritable.class);
-            job.setOutputValueClass(LongWritable.class);
-            job.setNumReduceTasks(1);
+        // Reducer - only one
+        job.setReducerClass(ColumnCardinalityReducer.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setOutputValueClass(LongWritable.class);
+        job.setNumReduceTasks(1);
 
-            this.deletePath(job.getConfiguration(), output);
+        this.deletePath(job.getConfiguration(), output);
 
-            logger.info("Going to submit HiveColumnCardinalityJob for table '" + table + "'");
+        logger.info("Going to submit HiveColumnCardinalityJob for table '" + table + "'");
 
-            TableDesc tableDesc = MetadataManager.getInstance(kylinConfig).getTableDesc(table);
-            attachKylinPropsAndMetadata(tableDesc, job.getConfiguration());
-            int result = waitForCompletion(job);
-
-            return result;
-        } catch (Exception e) {
-            printUsage(options);
-            throw e;
-        }
+        TableDesc tableDesc = MetadataManager.getInstance(kylinConfig).getTableDesc(table);
+        attachKylinPropsAndMetadata(tableDesc, job.getConfiguration());
+        int result = waitForCompletion(job);
 
+        return result;
     }
 
 }
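
Two asides on the hunk above: the hard-coded "dfs.block.size" value 67108864 is 64 MB (64 * 1024 * 1024), and the closing int result = waitForCompletion(job); return result; pair could be collapsed to return waitForCompletion(job); as the other jobs do.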

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
index f43a03a..1c05767 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/BulkLoadJob.java
@@ -42,40 +42,34 @@ public class BulkLoadJob extends AbstractHadoopJob {
     public int run(String[] args) throws Exception {
         Options options = new Options();
 
-        try {
-
-            options.addOption(OPTION_INPUT_PATH);
-            options.addOption(OPTION_HTABLE_NAME);
-            options.addOption(OPTION_CUBE_NAME);
-            parseOptions(options, args);
-
-            String tableName = getOptionValue(OPTION_HTABLE_NAME).toUpperCase();
-            // e.g
-            // /tmp/kylin-3f150b00-3332-41ca-9d3d-652f67f044d7/test_kylin_cube_with_slr_ready_2_segments/hfile/
-            // end with "/"
-            String input = getOptionValue(OPTION_INPUT_PATH);
-
-            Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
-            FsShell shell = new FsShell(conf);
-            try {
-                shell.run(new String[] { "-chmod", "-R", "777", input });
-            } catch (Exception e) {
-                logger.error("Couldn't change the file permissions ", e);
-                throw new IOException(e);
-            }
+        options.addOption(OPTION_INPUT_PATH);
+        options.addOption(OPTION_HTABLE_NAME);
+        options.addOption(OPTION_CUBE_NAME);
+        parseOptions(options, args);
 
-            String[] newArgs = new String[2];
-            newArgs[0] = input;
-            newArgs[1] = tableName;
+        String tableName = getOptionValue(OPTION_HTABLE_NAME).toUpperCase();
+        // e.g
+        // /tmp/kylin-3f150b00-3332-41ca-9d3d-652f67f044d7/test_kylin_cube_with_slr_ready_2_segments/hfile/
+        // end with "/"
+        String input = getOptionValue(OPTION_INPUT_PATH);
 
-            logger.debug("Start to run LoadIncrementalHFiles");
-            int ret = ToolRunner.run(new LoadIncrementalHFiles(conf), newArgs);
-            logger.debug("End to run LoadIncrementalHFiles");
-            return ret;
+        Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
+        FsShell shell = new FsShell(conf);
+        try {
+            shell.run(new String[] { "-chmod", "-R", "777", input });
         } catch (Exception e) {
-            printUsage(options);
-            throw e;
+            logger.error("Couldn't change the file permissions ", e);
+            throw new IOException(e);
         }
+
+        String[] newArgs = new String[2];
+        newArgs[0] = input;
+        newArgs[1] = tableName;
+
+        logger.debug("Start to run LoadIncrementalHFiles");
+        int ret = ToolRunner.run(new LoadIncrementalHFiles(conf), newArgs);
+        logger.debug("End to run LoadIncrementalHFiles");
+        return ret;
     }
 
     public static void main(String[] args) throws Exception {
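
Note the contrast with the removed blanket catches: the inner try/catch around FsShell is kept because it adds information, logging the chmod failure and wrapping it in an IOException, rather than merely printing usage and rethrowing.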

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index f83ad7d..fc52701 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -92,23 +92,16 @@ public class CreateHTableJob extends AbstractHadoopJob {
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
 
-        try {
-            byte[][] splitKeys;
-            if (statsEnabled) {
-                final Map<Long, Double> cuboidSizeMap = new CubeStatsReader(cubeSegment, kylinConfig).getCuboidSizeMap();
-                splitKeys = getRegionSplitsFromCuboidStatistics(cuboidSizeMap, kylinConfig, cubeSegment, partitionFilePath.getParent());
-            } else {
-                splitKeys = getRegionSplits(conf, partitionFilePath);
-            }
-
-            CubeHTableUtil.createHTable(cubeSegment, splitKeys);
-            return 0;
-        } catch (Exception e) {
-            printUsage(options);
-            e.printStackTrace(System.err);
-            logger.error(e.getLocalizedMessage(), e);
-            return 2;
+        byte[][] splitKeys;
+        if (statsEnabled) {
+            final Map<Long, Double> cuboidSizeMap = new CubeStatsReader(cubeSegment, kylinConfig).getCuboidSizeMap();
+            splitKeys = getRegionSplitsFromCuboidStatistics(cuboidSizeMap, kylinConfig, cubeSegment, partitionFilePath.getParent());
+        } else {
+            splitKeys = getRegionSplits(conf, partitionFilePath);
         }
+
+        CubeHTableUtil.createHTable(cubeSegment, splitKeys);
+        return 0;
     }
 
     @SuppressWarnings("deprecation")
@@ -209,7 +202,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
             for (int i = 0; i < nRegion; i++) {
                 innerRegionSplits.add(new HashMap<Long, Double>());
             }
-            
+
             double[] regionSizes = new double[nRegion];
             for (long cuboidId : allCuboids) {
                 double estimatedSize = cubeSizeMap.get(cuboidId);
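
One behavioral note on run() above: CreateHTableJob is the only file where the commit changes more than control-flow shape. Previously any exception was caught, usage and the stack trace were printed, and the method returned 2; now the exception propagates to the caller, so a failure surfaces as a thrown exception rather than a return code of 2. The second hunk is pure trailing-whitespace cleanup.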

http://git-wip-us.apache.org/repos/asf/kylin/blob/e22039d1/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
index 666b841..9e78aae 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
@@ -105,10 +105,6 @@ public class CubeHFileJob extends AbstractHadoopJob {
             this.deletePath(job.getConfiguration(), output);
 
             return waitForCompletion(job);
-        } catch (Exception e) {
-            logger.error("error in CubeHFileJob", e);
-            printUsage(options);
-            throw e;
         } finally {
             if (job != null)
                 cleanupTempConfFile(job.getConfiguration());