Posted to commits@kylin.apache.org by li...@apache.org on 2017/06/05 05:22:50 UTC

[02/67] [abbrv] kylin git commit: Revert "reformat code"

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
index 50051e0..afb0b33 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
@@ -84,8 +84,7 @@ public class FuzzyValueCombinationTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testSomeNull() {
-        System.out
-                .println("test some null ============================================================================");
+        System.out.println("test some null ============================================================================");
         Map<TblColRef, Set<String>> values = Maps.newHashMap();
         values.put(col1, set("a", "b", "c"));
         values.put(col2, set());
@@ -99,8 +98,7 @@ public class FuzzyValueCombinationTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testAllNulls() {
-        System.out
-                .println("test all nulls ============================================================================");
+        System.out.println("test all nulls ============================================================================");
         Map<TblColRef, Set<String>> values = Maps.newHashMap();
         values.put(col1, set());
         values.put(col2, set());
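
For context on the fixture above: each column maps to a set of fuzzy match values, and an empty set models the "null" case the two tests exercise. A minimal, self-contained sketch of that Guava pattern, with plain String keys standing in for Kylin's TblColRef (which needs cube metadata to construct):

    import java.util.Map;
    import java.util.Set;

    import com.google.common.collect.Maps;
    import com.google.common.collect.Sets;

    public class FuzzyValueMapSketch {
        public static void main(String[] args) {
            // One entry per column: the set holds the fuzzy match values,
            // and an empty set marks a column with no fuzzy constraint.
            Map<String, Set<String>> values = Maps.newHashMap();
            values.put("col1", Sets.newHashSet("a", "b", "c"));
            values.put("col2", Sets.<String> newHashSet());
            System.out.println(values);
        }
    }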

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
index 5f87b3b..d2b3488 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/HiveJDBCClientTest.java
@@ -86,8 +86,7 @@ public class HiveJDBCClientTest {
         Statement stmt = con.createStatement();
         String tableName = "testHiveDriverTable";
         stmt.execute("drop table if exists " + tableName);
-        stmt.execute(
-                "create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
+        stmt.execute("create table " + tableName + " (key int, value string) row format delimited fields terminated by ' '");
         // show tables
         String sql = "show tables '" + tableName + "'";
         System.out.println("Running: " + sql);

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
index 5e3c112..63f8961 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TimeConditionLiteralsReplacerTest.java
@@ -48,8 +48,7 @@ public class TimeConditionLiteralsReplacerTest extends FilterBaseTest {
 
         TimeConditionLiteralsReplacer filterDecorator = new TimeConditionLiteralsReplacer(compareFilter);
         byte[] bytes = TupleFilterSerializer.serialize(compareFilter, filterDecorator, DictCodeSystem.INSTANCE);
-        CompareTupleFilter compareTupleFilter = (CompareTupleFilter) TupleFilterSerializer.deserialize(bytes,
-                DictCodeSystem.INSTANCE);
+        CompareTupleFilter compareTupleFilter = (CompareTupleFilter) TupleFilterSerializer.deserialize(bytes, DictCodeSystem.INSTANCE);
         Assert.assertEquals("2000-01-01", compareTupleFilter.getFirstValue());
     }
 }
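
The assertion above follows the usual serialize/decorate/deserialize round-trip shape. A generic illustration of that shape using plain JDK serialization (Kylin's TupleFilterSerializer has its own byte format, so this shows only the pattern, not the codec):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;

    public class RoundTripSketch {
        public static void main(String[] args) throws Exception {
            String original = "2000-01-01";
            // Serialize to bytes...
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(bos);
            oos.writeObject(original);
            oos.flush();
            byte[] bytes = bos.toByteArray();
            // ...deserialize, and check the value survived the trip.
            Object back = new ObjectInputStream(new ByteArrayInputStream(bytes)).readObject();
            System.out.println(original.equals(back)); // true
        }
    }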

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
index 1235a9d..291072f 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/coprocessor/FilterBaseTest.java
@@ -201,8 +201,7 @@ public class FilterBaseTest extends LocalFileMetadataTestCase {
         int s1 = f1.getChildren().size();
         int s2 = f2.getChildren().size();
         if (s1 != s2) {
-            throw new IllegalStateException(
-                    "f1=" + str1 + ", f2=" + str2 + " has different children: " + s1 + " vs. " + s2);
+            throw new IllegalStateException("f1=" + str1 + ", f2=" + str2 + " has different children: " + s1 + " vs. " + s2);
         }
 
         for (int i = 0; i < s1; i++) {
@@ -210,8 +209,7 @@ public class FilterBaseTest extends LocalFileMetadataTestCase {
         }
     }
 
-    private static String[][] SAMPLE_DATA = new String[][] { { "2013-03-10", "2012-01-12", "2014-03-10" },
-            { "ClothinShoes & Accessories", "ABIN", "FP-GTC", "FP-NON-GTC" } };
+    private static String[][] SAMPLE_DATA = new String[][] { { "2013-03-10", "2012-01-12", "2014-03-10" }, { "ClothinShoes & Accessories", "ABIN", "FP-GTC", "FP-NON-GTC" } };
 
     protected Collection<Tuple> generateTuple(int number, List<TblColRef> columns, int[] matches) {
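
The compareFilter helper above walks two filter trees in lockstep and fails as soon as a node's child counts diverge. A self-contained sketch of that recursion, with a hypothetical Node type in place of Kylin's TupleFilter:

    import java.util.Arrays;
    import java.util.List;

    public class TreeCompareSketch {
        // Hypothetical stand-in for TupleFilter: only the child list matters here.
        static class Node {
            final List<Node> children;
            Node(Node... children) {
                this.children = Arrays.asList(children);
            }
        }

        static void compare(Node f1, Node f2) {
            int s1 = f1.children.size();
            int s2 = f2.children.size();
            if (s1 != s2) {
                throw new IllegalStateException("different children: " + s1 + " vs. " + s2);
            }
            // Same size at this level: recurse pairwise into the subtrees.
            for (int i = 0; i < s1; i++) {
                compare(f1.children.get(i), f2.children.get(i));
            }
        }

        public static void main(String[] args) {
            compare(new Node(new Node(), new Node()), new Node(new Node(), new Node())); // passes silently
        }
    }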
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
index 7335017..f151876 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/MeasureTypeOnlyAggrInBaseTest.java
@@ -89,11 +89,9 @@ public class MeasureTypeOnlyAggrInBaseTest extends LocalFileMetadataTestCase {
     }
 
     @Test
-    public void testIdentifyCuboidV2()
-            throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, NoSuchFieldException {
+    public void testIdentifyCuboidV2() throws InvocationTargetException, NoSuchMethodException, IllegalAccessException, NoSuchFieldException {
         CubeDesc cubeDesc = cube.getDescriptor();
-        Cuboid ret = Cuboid.identifyCuboid(cubeDesc, Sets.<TblColRef> newHashSet(),
-                Lists.<FunctionDesc> newArrayList());
+        Cuboid ret = Cuboid.identifyCuboid(cubeDesc, Sets.<TblColRef> newHashSet(), Lists.<FunctionDesc> newArrayList());
         long baseCuboidId = cubeDesc.getRowkey().getFullMask();
         assertNotEquals(baseCuboidId, ret.getId());
         ret = Cuboid.identifyCuboid(cubeDesc, dimensions, metrics);
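
The Sets.<TblColRef> newHashSet() spelling above is an explicit type witness: with an empty argument list, pre-Java-8 inference cannot deduce the element type at this call site, so it is stated by hand. A tiny illustration with JDK types:

    import java.util.List;
    import java.util.Set;

    import com.google.common.collect.Lists;
    import com.google.common.collect.Sets;

    public class TypeWitnessSketch {
        static int total(Set<String> s, List<Integer> l) {
            return s.size() + l.size();
        }

        public static void main(String[] args) {
            // Explicit type arguments tell javac what the empty collections hold.
            System.out.println(total(Sets.<String> newHashSet(), Lists.<Integer> newArrayList()));
        }
    }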

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
index ef7ec96..d47f393 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
@@ -83,11 +83,9 @@ public class CubeHFileMapper2Test extends LocalFileMetadataTestCase {
         ImmutableBytesWritable outKey = (ImmutableBytesWritable) outKV[0];
         KeyValue outValue = (KeyValue) outKV[1];
 
-        assertTrue(Bytes.compareTo(key.getBytes(), 0, key.getLength(), outKey.get(), outKey.getOffset(),
-                outKey.getLength()) == 0);
+        assertTrue(Bytes.compareTo(key.getBytes(), 0, key.getLength(), outKey.get(), outKey.getOffset(), outKey.getLength()) == 0);
 
-        assertTrue(Bytes.compareTo(value.getBytes(), 0, value.getLength(), outValue.getValueArray(),
-                outValue.getValueOffset(), outValue.getValueLength()) == 0);
+        assertTrue(Bytes.compareTo(value.getBytes(), 0, value.getLength(), outValue.getValueArray(), outValue.getValueOffset(), outValue.getValueLength()) == 0);
     }
 
 }
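
Both assertions compare byte ranges rather than whole arrays, because Writable buffers can be larger than their logical length. A minimal sketch of HBase's offset/length comparison:

    import org.apache.hadoop.hbase.util.Bytes;

    public class BytesCompareSketch {
        public static void main(String[] args) {
            byte[] a = { 1, 2, 3, 4 };
            byte[] b = { 9, 1, 2, 3, 4, 9 };
            // Compare a[0..4) with b[1..5): equal contents yield 0,
            // which is exactly what the assertTrue(... == 0) checks rely on.
            System.out.println(Bytes.compareTo(a, 0, 4, b, 1, 4) == 0); // true
        }
    }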

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
index b66895f..03a3cba 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapperTest.java
@@ -82,20 +82,13 @@ public class RangeKeyDistributionMapperTest {
     @Test
     public void testMapperWithHeader() throws IOException {
 
-        Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0,
-                0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0,
-                0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey3 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 2, 2, 2,
-                2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey4 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 3, 3, 3,
-                3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey5 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 4, 4, 4,
-                4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey6 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 5, 5, 5,
-                5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
-        Text inputKey7 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6,
-                6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey1 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey2 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 0, 0, 0, 0, 0, 0, 0, 127, 11, 122, 1, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey3 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 2, 2, 2, 2, 2, 2, 2, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey4 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 3, 3, 3, 3, 3, 3, 3, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey5 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 4, 4, 4, 4, 4, 4, 4, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey6 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 5, 5, 5, 5, 5, 5, 5, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
+        Text inputKey7 = new Text(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 });
 
         mapDriver.addInput(inputKey1, new Text("abc"));
         mapDriver.addInput(inputKey2, new Text("abc"));
@@ -111,8 +104,7 @@ public class RangeKeyDistributionMapperTest {
 
         byte[] key1 = result.get(0).getFirst().getBytes();
         LongWritable value1 = result.get(0).getSecond();
-        assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6,
-                6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
+        assertArrayEquals(new byte[] { 0, 0, 0, 0, 0, 0, 0, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7, 6, 6, 6, 6, 6, 6, 6, 127, 11, 56, -23, 0, 22, 98, 1, 0, 121, 7 }, key1);
         assertEquals(273, value1.get());
 
     }
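
The test is built on MRUnit's MapDriver: feed key/value pairs in, run the mapper, and inspect the emitted pairs. A compact sketch of that harness with a hypothetical mapper (LenMapper below is illustrative, not Kylin's RangeKeyDistributionMapper):

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mrunit.mapreduce.MapDriver;
    import org.apache.hadoop.mrunit.types.Pair;

    public class MapDriverSketch {
        // Hypothetical mapper: emits each key with the byte length of its value.
        static class LenMapper extends Mapper<Text, Text, Text, LongWritable> {
            @Override
            protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
                context.write(key, new LongWritable(value.getLength()));
            }
        }

        public static void main(String[] args) throws IOException {
            MapDriver<Text, Text, Text, LongWritable> mapDriver = MapDriver.newMapDriver(new LenMapper());
            mapDriver.addInput(new Text("k"), new Text("abc"));
            List<Pair<Text, LongWritable>> result = mapDriver.run();
            System.out.println(result.get(0).getFirst() + " -> " + result.get(0).getSecond());
        }
    }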

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
index 33def1c..9b1a00d 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
@@ -52,8 +52,7 @@ public class RowValueDecoderTest extends LocalFileMetadataTestCase {
 
     @Test
     public void testDecode() throws Exception {
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         HBaseColumnDesc hbaseCol = cubeDesc.getHbaseMapping().getColumnFamily()[0].getColumns()[0];
 
         BufferedMeasureCodec codec = new BufferedMeasureCodec(hbaseCol.getMeasures());
@@ -83,8 +82,7 @@ public class RowValueDecoderTest extends LocalFileMetadataTestCase {
 
     @Test(expected = IllegalArgumentException.class)
     public void testError() throws Exception {
-        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready")
-                .getDescriptor();
+        CubeDesc cubeDesc = CubeManager.getInstance(getTestConfig()).getCube("test_kylin_cube_with_slr_ready").getDescriptor();
         HBaseColumnDesc hbaseCol = cubeDesc.getHbaseMapping().getColumnFamily()[0].getColumns()[0];
 
         BufferedMeasureCodec codec = new BufferedMeasureCodec(hbaseCol.getMeasures());

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
index 2faf3c8..62b154e 100644
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
+++ b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
@@ -43,8 +43,7 @@ public class SandboxMetastoreCLI {
         ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
         System.setProperty(KylinConfig.KYLIN_CONF, HBaseMetadataTestCase.SANDBOX_TEST_DATA);
         if (StringUtils.isEmpty(System.getProperty("hdp.version"))) {
-            throw new RuntimeException(
-                    "No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
+            throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
         }
 
         if (args.length < 1) {
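
The guard above fails fast when a required JVM option is missing, which gives a clearer error than a late classpath or substitution failure. The same idiom in isolation (the hdp.version property is kept from the original; run with -Dhdp.version=2.4.0.0-169 to pass):

    public class JvmOptionCheckSketch {
        public static void main(String[] args) {
            String hdpVersion = System.getProperty("hdp.version");
            // Fail fast with an actionable message instead of a confusing late error.
            if (hdpVersion == null || hdpVersion.isEmpty()) {
                throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.4.0.0-169");
            }
            System.out.println("hdp.version=" + hdpVersion);
        }
    }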

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
index 6beb835..19ee08f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java
@@ -43,16 +43,13 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
     private static final Logger logger = LoggerFactory.getLogger(AbstractInfoExtractor.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true)
-            .withDescription("specify the dest dir to save the related information").create("destDir");
+    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(true).withDescription("specify the dest dir to save the related information").create("destDir");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false)
-            .withDescription("specify whether to compress the output with zip. Default true.").create("compress");
+    private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false).withDescription("specify whether to compress the output with zip. Default true.").create("compress");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_SUBMODULE = OptionBuilder.withArgName("submodule").hasArg().isRequired(false)
-            .withDescription("specify whether this is a submodule of other CLI tool").create("submodule");
+    private static final Option OPTION_SUBMODULE = OptionBuilder.withArgName("submodule").hasArg().isRequired(false).withDescription("specify whether this is a submodule of other CLI tool").create("submodule");
 
     private static final String DEFAULT_PACKAGE_TYPE = "base";
     private static final String[] COMMIT_SHA1_FILES = { "commit_SHA1", "commit.sha1" };
@@ -79,10 +76,8 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         String exportDest = optionsHelper.getOptionValue(options.getOption("destDir"));
-        boolean shouldCompress = optionsHelper.hasOption(OPTION_COMPRESS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
-        boolean isSubmodule = optionsHelper.hasOption(OPTION_SUBMODULE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE)) : false;
+        boolean shouldCompress = optionsHelper.hasOption(OPTION_COMPRESS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_COMPRESS)) : true;
+        boolean isSubmodule = optionsHelper.hasOption(OPTION_SUBMODULE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_SUBMODULE)) : false;
 
         if (StringUtils.isEmpty(exportDest)) {
             throw new RuntimeException("destDir is not set, exit directly without extracting");
@@ -92,8 +87,7 @@ public abstract class AbstractInfoExtractor extends AbstractApplication {
         }
 
         // create new folder to contain the output
-        String packageName = packageType.toLowerCase() + "_"
-                + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
+        String packageName = packageType.toLowerCase() + "_" + new SimpleDateFormat("YYYY_MM_dd_HH_mm_ss").format(new Date());
         if (!isSubmodule && new File(exportDest).exists()) {
             exportDest = exportDest + packageName + "/";
         }
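
Two idioms recur through this file and the extractors below: the Commons CLI 1.x OptionBuilder chain (static methods called fluently, hence the @SuppressWarnings("static-access")) and the hasOption ? parse : default ternary. A runnable sketch of both; the option name and default come from the diff, while the parser wiring is illustrative. Note in passing that the packageName timestamp pattern uses "YYYY", which SimpleDateFormat reads as week-year; it matches "yyyy" except around New Year.

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;

    public class OptionBuilderSketch {
        @SuppressWarnings("static-access")
        private static final Option OPTION_COMPRESS = OptionBuilder.withArgName("compress").hasArg().isRequired(false).withDescription("specify whether to compress the output with zip. Default true.").create("compress");

        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption(OPTION_COMPRESS);
            CommandLine line = new GnuParser().parse(options, args);
            // The hasOption ? parse : default idiom from execute(...) above.
            boolean shouldCompress = line.hasOption("compress") ? Boolean.valueOf(line.getOptionValue("compress")) : true;
            System.out.println("compress=" + shouldCompress);
        }
    }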

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
index fabee87..6c8a6b0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/AclTableMigrationCLI.java
@@ -40,8 +40,7 @@ public class AclTableMigrationCLI {
         case CHECK:
             boolean needMigrate = tool.checkIfNeedMigrate(KylinConfig.getInstanceFromEnv());
             if (needMigrate) {
-                System.out.println(
-                        "Found acl tables that need to migrate. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
+                System.out.println("Found acl tables that need to migrate. Please execute command : ${KYLIN_HOME}/bin/kylin.sh org.apache.kylin.tool.AclTableMigrationCLI MIGRATE");
                 System.exit(1);
             }
             break;

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
index 39e316c..30bbb5e 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMetaExtractor.java
@@ -77,49 +77,31 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false)
-            .withDescription("Specify which hybrid to extract").create("hybrid");
+    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false).withDescription("Specify which hybrid to extract").create("hybrid");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_All_PROJECT = OptionBuilder.withArgName("allProjects").hasArg(false)
-            .isRequired(false).withDescription("Specify realizations in all projects to extract").create("allProjects");
+    private static final Option OPTION_All_PROJECT = OptionBuilder.withArgName("allProjects").hasArg(false).isRequired(false).withDescription("Specify realizations in all projects to extract").create("allProjects");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_STORAGE_TYPE = OptionBuilder.withArgName("storageType").hasArg()
-            .isRequired(false).withDescription("Specify the storage type to overwrite. Default is empty, keep origin.")
-            .create("storageType");
+    private static final Option OPTION_STORAGE_TYPE = OptionBuilder.withArgName("storageType").hasArg().isRequired(false).withDescription("Specify the storage type to overwrite. Default is empty, keep origin.").create("storageType");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_ENGINE_TYPE = OptionBuilder.withArgName("engineType").hasArg().isRequired(false)
-            .withDescription("Specify the engine type to overwrite. Default is empty, keep origin.")
-            .create("engineType");
+    private static final Option OPTION_ENGINE_TYPE = OptionBuilder.withArgName("engineType").hasArg().isRequired(false).withDescription("Specify the engine type to overwrite. Default is empty, keep origin.").create("engineType");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg()
-            .isRequired(false).withDescription("set this to true if want extract the segments info. Default true")
-            .create("includeSegments");
+    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg().isRequired(false).withDescription("set this to true if want extract the segments info. Default true").create("includeSegments");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false)
-            .withDescription("set this to true if want to extract job info/outputs too. Default false")
-            .create("includeJobs");
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("set this to true if want to extract job info/outputs too. Default false").create("includeJobs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_ONLY_JOB_OUTPUT = OptionBuilder.withArgName("onlyOutput").hasArg()
-            .isRequired(false).withDescription("when include jobs, onlt extract output of job. Default true")
-            .create("onlyOutput");
+    private static final Option OPTION_INCLUDE_ONLY_JOB_OUTPUT = OptionBuilder.withArgName("onlyOutput").hasArg().isRequired(false).withDescription("when include jobs, onlt extract output of job. Default true").create("onlyOutput");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_SEGMENT_DETAILS = OptionBuilder.withArgName("includeSegmentDetails")
-            .hasArg().isRequired(false)
-            .withDescription(
-                    "set this to true if want to extract segment details too, such as dict, tablesnapshot. Default false")
-            .create("includeSegmentDetails");
+    private static final Option OPTION_INCLUDE_SEGMENT_DETAILS = OptionBuilder.withArgName("includeSegmentDetails").hasArg().isRequired(false).withDescription("set this to true if want to extract segment details too, such as dict, tablesnapshot. Default false").create("includeSegmentDetails");
 
     private KylinConfig kylinConfig;
     private MetadataManager metadataManager;
@@ -166,18 +148,12 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
 
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
-        includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
-        includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : false;
-        includeSegmentDetails = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENT_DETAILS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENT_DETAILS)) : false;
-        onlyJobOutput = optionsHelper.hasOption(OPTION_INCLUDE_ONLY_JOB_OUTPUT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_ONLY_JOB_OUTPUT)) : true;
-        storageType = optionsHelper.hasOption(OPTION_STORAGE_TYPE) ? optionsHelper.getOptionValue(OPTION_STORAGE_TYPE)
-                : null;
-        engineType = optionsHelper.hasOption(OPTION_ENGINE_TYPE) ? optionsHelper.getOptionValue(OPTION_ENGINE_TYPE)
-                : null;
+        includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
+        includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : false;
+        includeSegmentDetails = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENT_DETAILS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENT_DETAILS)) : false;
+        onlyJobOutput = optionsHelper.hasOption(OPTION_INCLUDE_ONLY_JOB_OUTPUT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_ONLY_JOB_OUTPUT)) : true;
+        storageType = optionsHelper.hasOption(OPTION_STORAGE_TYPE) ? optionsHelper.getOptionValue(OPTION_STORAGE_TYPE) : null;
+        engineType = optionsHelper.hasOption(OPTION_ENGINE_TYPE) ? optionsHelper.getOptionValue(OPTION_ENGINE_TYPE) : null;
 
         kylinConfig = KylinConfig.getInstanceFromEnv();
         metadataManager = MetadataManager.getInstance(kylinConfig);
@@ -262,9 +238,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
                 try {
                     ResourceTool.copy(srcConfig, dstConfig, Lists.newArrayList(r));
                 } catch (Exception e) {
-                    logger.warn(
-                            "Exception when copying optional resource {}. May be caused by resource missing. skip it.",
-                            r);
+                    logger.warn("Exception when copying optional resource {}. May be caused by resource missing. skip it.", r);
                 }
             }
 
@@ -322,8 +296,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
     private void dealWithStreaming(CubeInstance cube) {
         streamingManager = StreamingManager.getInstance(kylinConfig);
         for (StreamingConfig streamingConfig : streamingManager.listAllStreaming()) {
-            if (streamingConfig.getName() != null
-                    && streamingConfig.getName().equalsIgnoreCase(cube.getRootFactTable())) {
+            if (streamingConfig.getName() != null && streamingConfig.getName().equalsIgnoreCase(cube.getRootFactTable())) {
                 addRequired(StreamingConfig.concatResourcePath(streamingConfig.getName()));
                 addRequired(KafkaConfig.concatResourcePath(streamingConfig.getName()));
             }
@@ -405,8 +378,7 @@ public class CubeMetaExtractor extends AbstractInfoExtractor {
             addRequired(HybridInstance.concatResourcePath(hybridInstance.getName()));
             for (IRealization iRealization : hybridInstance.getRealizations()) {
                 if (iRealization.getType() != RealizationType.CUBE) {
-                    throw new RuntimeException("Hybrid " + iRealization.getName() + " contains non cube child "
-                            + iRealization.getName() + " with type " + iRealization.getType());
+                    throw new RuntimeException("Hybrid " + iRealization.getName() + " contains non cube child " + iRealization.getName() + " with type " + iRealization.getType());
                 }
                 retrieveResourcePath(iRealization);
             }
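
The extraction loop treats dictionaries and snapshots as optional: a failed copy is logged and skipped rather than aborting the whole run. The same warn-and-continue shape with plain java.nio.file standing in for Kylin's ResourceTool (paths are placeholders):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import java.util.Arrays;
    import java.util.List;

    public class OptionalCopySketch {
        public static void main(String[] args) {
            List<String> optionalResources = Arrays.asList("dict/a.dict", "snapshot/b.snapshot");
            for (String r : optionalResources) {
                try {
                    Path src = Paths.get("/tmp/src", r);
                    Path dst = Paths.get("/tmp/dst", r);
                    Files.createDirectories(dst.getParent());
                    Files.copy(src, dst, StandardCopyOption.REPLACE_EXISTING);
                } catch (IOException e) {
                    // Optional resources may legitimately be missing: warn and move on.
                    System.err.println("Exception when copying optional resource " + r + ". Skip it.");
                }
            }
        }
    }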

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java b/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
index 0b145d6..b4c44c3 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMetaIngester.java
@@ -60,26 +60,16 @@ public class CubeMetaIngester extends AbstractApplication {
     private static final Logger logger = LoggerFactory.getLogger(CubeMetaIngester.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_SRC = OptionBuilder.withArgName("srcPath").hasArg().isRequired(true)
-            .withDescription("specify the path to the extracted cube metadata zip file").create("srcPath");
+    private static final Option OPTION_SRC = OptionBuilder.withArgName("srcPath").hasArg().isRequired(true).withDescription("specify the path to the extracted cube metadata zip file").create("srcPath");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true)
-            .withDescription("specify the target project for the new cubes").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(true).withDescription("specify the target project for the new cubes").create("project");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_FORCE_INGEST = OptionBuilder.withArgName("forceIngest").hasArg()
-            .isRequired(false)
-            .withDescription(
-                    "skip the target cube, model and table check and ingest by force. Use in caution because it might break existing cubes! Suggest to backup metadata store first")
-            .create("forceIngest");
+    private static final Option OPTION_FORCE_INGEST = OptionBuilder.withArgName("forceIngest").hasArg().isRequired(false).withDescription("skip the target cube, model and table check and ingest by force. Use in caution because it might break existing cubes! Suggest to backup metadata store first").create("forceIngest");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_OVERWRITE_TABLES = OptionBuilder.withArgName("overwriteTables").hasArg()
-            .isRequired(false)
-            .withDescription(
-                    "If table meta conflicts, overwrite the one in metadata store with the one in srcPath. Use in caution because it might break existing cubes! Suggest to backup metadata store first")
-            .create("overwriteTables");
+    private static final Option OPTION_OVERWRITE_TABLES = OptionBuilder.withArgName("overwriteTables").hasArg().isRequired(false).withDescription("If table meta conflicts, overwrite the one in metadata store with the one in srcPath. Use in caution because it might break existing cubes! Suggest to backup metadata store first").create("overwriteTables");
 
     private KylinConfig kylinConfig;
     private MetadataManager metadataManager;
@@ -158,8 +148,7 @@ public class CubeMetaIngester extends AbstractApplication {
 
         for (TableDesc tableDesc : srcMetadataManager.listAllTables()) {
             logger.info("add " + tableDesc + " to " + targetProjectName);
-            projectManager.addTableDescToProject(Lists.newArrayList(tableDesc.getIdentity()).toArray(new String[0]),
-                    targetProjectName);
+            projectManager.addTableDescToProject(Lists.newArrayList(tableDesc.getIdentity()).toArray(new String[0]), targetProjectName);
         }
 
         for (CubeInstance cube : srcCubeManager.listAllCubes()) {
@@ -170,8 +159,7 @@ public class CubeMetaIngester extends AbstractApplication {
 
     }
 
-    private void checkAndMark(MetadataManager srcMetadataManager, HybridManager srcHybridManager,
-            CubeManager srcCubeManager, CubeDescManager srcCubeDescManager) {
+    private void checkAndMark(MetadataManager srcMetadataManager, HybridManager srcHybridManager, CubeManager srcCubeManager, CubeDescManager srcCubeDescManager) {
         if (srcHybridManager.listHybridInstances().size() > 0) {
             throw new IllegalStateException("Does not support ingest hybrid yet");
         }
@@ -184,15 +172,12 @@ public class CubeMetaIngester extends AbstractApplication {
         for (TableDesc tableDesc : srcMetadataManager.listAllTables()) {
             TableDesc existing = metadataManager.getTableDesc(tableDesc.getIdentity());
             if (existing != null && !existing.equals(tableDesc)) {
-                logger.info("Table {} already has a different version in target metadata store",
-                        tableDesc.getIdentity());
+                logger.info("Table {} already has a different version in target metadata store", tableDesc.getIdentity());
                 logger.info("Existing version: " + existing);
                 logger.info("New version: " + tableDesc);
 
                 if (!forceIngest && !overwriteTables) {
-                    throw new IllegalStateException(
-                            "table already exists with a different version: " + tableDesc.getIdentity()
-                                    + ". Consider adding -overwriteTables option to force overwriting (with caution)");
+                    throw new IllegalStateException("table already exists with a different version: " + tableDesc.getIdentity() + ". Consider adding -overwriteTables option to force overwriting (with caution)");
                 } else {
                     logger.warn("Overwriting the old table desc: " + tableDesc.getIdentity());
                 }
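
checkAndMark refuses to overwrite a table whose stored version differs from the incoming one unless -forceIngest or -overwriteTables is given. The guard in miniature, over a plain map instead of Kylin's metadata store:

    import java.util.HashMap;
    import java.util.Map;

    public class IngestGuardSketch {
        static void ingest(Map<String, String> store, String key, String value, boolean force) {
            String existing = store.get(key);
            // Refuse to silently replace a different existing version.
            if (existing != null && !existing.equals(value) && !force) {
                throw new IllegalStateException(key + " already exists with a different version");
            }
            store.put(key, value);
        }

        public static void main(String[] args) {
            Map<String, String> store = new HashMap<String, String>();
            ingest(store, "DEFAULT.T1", "v1", false);
            ingest(store, "DEFAULT.T1", "v2", true); // forced overwrite succeeds
            System.out.println(store);
        }
    }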

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
index 36feef8..08d4292 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCLI.java
@@ -108,22 +108,12 @@ public class CubeMigrationCLI {
     }
 
     protected void usage() {
-        System.out.println(
-                "Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
-        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n"
-                + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n"
-                + "cubeName: the name of cube to be migrated. \n"
-                + "projectName: The target project in the target environment.(Make sure it exist) \n"
-                + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n"
-                + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n"
-                + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n"
-                + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
+        System.out.println("Usage: CubeMigrationCLI srcKylinConfigUri dstKylinConfigUri cubeName projectName copyAclOrNot purgeOrNot overwriteIfExists realExecute");
+        System.out.println(" srcKylinConfigUri: The KylinConfig of the cube’s source \n" + "dstKylinConfigUri: The KylinConfig of the cube’s new home \n" + "cubeName: the name of cube to be migrated. \n" + "projectName: The target project in the target environment.(Make sure it exist) \n" + "copyAclOrNot: true or false: whether copy cube ACL to target environment. \n" + "purgeOrNot: true or false: whether purge the cube from src server after the migration. \n" + "overwriteIfExists: overwrite cube if it already exists in the target environment. \n" + "realExecute: if false, just print the operations to take, if true, do the real migration. \n");
 
     }
 
-    public void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName, String copyAcl,
-            String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public void moveCube(KylinConfig srcCfg, KylinConfig dstCfg, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
         srcConfig = srcCfg;
         srcStore = ResourceStore.getStore(srcConfig);
@@ -173,12 +163,9 @@ public class CubeMigrationCLI {
         }
     }
 
-    public void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl,
-            String purgeAndDisable, String overwriteIfExists, String realExecute)
-            throws IOException, InterruptedException {
+    public void moveCube(String srcCfgUri, String dstCfgUri, String cubeName, String projectName, String copyAcl, String purgeAndDisable, String overwriteIfExists, String realExecute) throws IOException, InterruptedException {
 
-        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName,
-                projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
+        moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
     public void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException {
@@ -213,14 +200,12 @@ public class CubeMigrationCLI {
         if (cube.getDescriptor().getEngineType() != IStorageAware.ID_SHARDED_HBASE)
             return;
         for (CubeSegment segment : cube.getSegments()) {
-            operations
-                    .add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
+            operations.add(new Opt(OptType.CHANGE_HTABLE_HOST, new Object[] { segment.getStorageLocationIdentifier() }));
         }
     }
 
     private void copyACL(CubeInstance cube, String projectName) {
-        operations.add(new Opt(OptType.COPY_ACL,
-                new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
+        operations.add(new Opt(OptType.COPY_ACL, new Object[] { cube.getUuid(), cube.getDescriptor().getModel().getUuid(), projectName }));
     }
 
     private void copyFilesInMetaStore(CubeInstance cube, String overwriteIfExists) throws IOException {
@@ -230,8 +215,7 @@ public class CubeMigrationCLI {
         listCubeRelatedResources(cube, metaItems, dictAndSnapshot);
 
         if (dstStore.exists(cube.getResourcePath()) && !overwriteIfExists.equalsIgnoreCase("true"))
-            throw new IllegalStateException("The cube named " + cube.getName()
-                    + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
+            throw new IllegalStateException("The cube named " + cube.getName() + " already exists on target metadata store. Use overwriteIfExists to overwrite it");
 
         for (String item : metaItems) {
             operations.add(new Opt(OptType.COPY_FILE_IN_META, new Object[] { item }));
@@ -242,8 +226,7 @@ public class CubeMigrationCLI {
         }
     }
 
-    private void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName)
-            throws IOException {
+    private void addCubeAndModelIntoProject(CubeInstance srcCube, String cubeName, String projectName) throws IOException {
         String projectResPath = ProjectInstance.concatResourcePath(projectName);
         if (!dstStore.exists(projectResPath))
             throw new IllegalStateException("The target project " + projectName + " does not exist");
@@ -255,8 +238,7 @@ public class CubeMigrationCLI {
         operations.add(new Opt(OptType.PURGE_AND_DISABLE, new Object[] { cubeName }));
     }
 
-    protected void listCubeRelatedResources(CubeInstance cube, List<String> metaResource, Set<String> dictAndSnapshot)
-            throws IOException {
+    protected void listCubeRelatedResources(CubeInstance cube, List<String> metaResource, Set<String> dictAndSnapshot) throws IOException {
 
         CubeDesc cubeDesc = cube.getDescriptor();
         metaResource.add(cube.getResourcePath());
@@ -473,10 +455,8 @@ public class CubeMigrationCLI {
             Table srcAclHtable = null;
             Table destAclHtable = null;
             try {
-                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(TableName.valueOf(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 // cube acl
                 Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
@@ -487,10 +467,8 @@ public class CubeMigrationCLI {
                         byte[] value = CellUtil.cloneValue(cell);
 
                         // use the target project uuid as the parent
-                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                                && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                            String valueString = "{\"id\":\"" + projUUID
-                                    + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                            String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                             value = Bytes.toBytes(valueString);
                         }
                         Put put = new Put(Bytes.toBytes(cubeId));
@@ -565,8 +543,7 @@ public class CubeMigrationCLI {
             String modelId = (String) opt.params[1];
             Table destAclHtable = null;
             try {
-                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl())
-                        .getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(TableName.valueOf(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME));
 
                 destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
                 destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
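
The ACL copy walks the result's cells and clones family, qualifier, and value out of each one before rewriting the parent column. Those CellUtil calls in isolation, on a hand-built KeyValue (HBase 1.x API; row, family, qualifier, and value are placeholders):

    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellCloneSketch {
        public static void main(String[] args) {
            // A KeyValue is a concrete Cell; the migration reads real cells from a Result.
            KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("i"), Bytes.toBytes("p"), Bytes.toBytes("v"));
            byte[] family = CellUtil.cloneFamily(kv);
            byte[] column = CellUtil.cloneQualifier(kv);
            byte[] value = CellUtil.cloneValue(kv);
            System.out.println(Bytes.toString(family) + ":" + Bytes.toString(column) + "=" + Bytes.toString(value));
        }
    }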

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
index 040c702..54fbbc0 100644
--- a/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/CubeMigrationCheckCLI.java
@@ -54,14 +54,11 @@ public class CubeMigrationCheckCLI {
 
     private static final Logger logger = LoggerFactory.getLogger(CubeMigrationCheckCLI.class);
 
-    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false)
-            .withDescription("Fix the inconsistent cube segments' HOST").create("fix");
+    private static final Option OPTION_FIX = OptionBuilder.withArgName("fix").hasArg().isRequired(false).withDescription("Fix the inconsistent cube segments' HOST").create("fix");
 
-    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false)
-            .withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
+    private static final Option OPTION_DST_CFG_URI = OptionBuilder.withArgName("dstCfgUri").hasArg().isRequired(false).withDescription("The KylinConfig of the cube’s new home").create("dstCfgUri");
 
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("The name of cube migrated").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
     private KylinConfig dstCfg;
     private HBaseAdmin hbaseAdmin;
@@ -191,9 +188,7 @@ public class CubeMigrationCheckCLI {
             for (String segFullName : inconsistentHTables) {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
-                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1]
-                        + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to "
-                        + dstCfg.getMetadataUrlPrefix());
+                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.modifyTable(sepNameList[0], desc);
@@ -213,8 +208,7 @@ public class CubeMigrationCheckCLI {
         logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
         for (String segFullName : issueExistHTables) {
             String[] sepNameList = segFullName.split(",");
-            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1]
-                    + " has some issues and cannot be read successfully!!!");
+            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1] + " has some issues and cannot be read successfully!!!");
         }
         logger.info("----------------------------------------------------");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
index 4c34f47..e1f994f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java
@@ -46,36 +46,25 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final int DEFAULT_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg()
-            .isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.")
-            .create("includeConf");
+    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg().isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.").create("includeConf");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_HBASE = OptionBuilder.withArgName("includeHBase").hasArg()
-            .isRequired(false).withDescription("Specify whether to include hbase files to extract. Default true.")
-            .create("includeHBase");
+    private static final Option OPTION_INCLUDE_HBASE = OptionBuilder.withArgName("includeHBase").hasArg().isRequired(false).withDescription("Specify whether to include hbase files to extract. Default true.").create("includeHBase");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg()
-            .isRequired(false).withDescription("Specify whether to include client info to extract. Default true.")
-            .create("includeClient");
+    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg().isRequired(false).withDescription("Specify whether to include client info to extract. Default true.").create("includeClient");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false)
-            .withDescription("Specify whether to include job info to extract. Default true.").create("includeJobs");
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("Specify whether to include job info to extract. Default true.").create("includeJobs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_THREADS = OptionBuilder.withArgName("threads").hasArg().isRequired(false)
-            .withDescription("Specify number of threads for parallel extraction.").create("threads");
+    private static final Option OPTION_THREADS = OptionBuilder.withArgName("threads").hasArg().isRequired(false).withDescription("Specify number of threads for parallel extraction.").create("threads");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin info to extract. Default " + DEFAULT_PERIOD + ".")
-            .create("period");
+    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false).withDescription("specify how many days of kylin info to extract. Default " + DEFAULT_PERIOD + ".").create("period");
 
     private static final int DEFAULT_PARALLEL_SIZE = 4;
 
@@ -119,19 +108,13 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     @Override
     protected void executeExtract(final OptionsHelper optionsHelper, final File exportDir) throws IOException {
         final String projectInput = optionsHelper.getOptionValue(options.getOption("project"));
-        final boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
-        final boolean includeHBase = optionsHelper.hasOption(OPTION_INCLUDE_HBASE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_HBASE)) : true;
-        final boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
-        final boolean includeJob = optionsHelper.hasOption(OPTION_INCLUDE_JOB)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
-        final int threadsNum = optionsHelper.hasOption(OPTION_THREADS)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THREADS)) : DEFAULT_PARALLEL_SIZE;
+        final boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
+        final boolean includeHBase = optionsHelper.hasOption(OPTION_INCLUDE_HBASE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_HBASE)) : true;
+        final boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
+        final boolean includeJob = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
+        final int threadsNum = optionsHelper.hasOption(OPTION_THREADS) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_THREADS)) : DEFAULT_PARALLEL_SIZE;
         final String projectNames = StringUtils.join(getProjects(projectInput), ",");
-        final int period = optionsHelper.hasOption(OPTION_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
+        final int period = optionsHelper.hasOption(OPTION_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
 
         logger.info("Start diagnosis info extraction in {} threads.", threadsNum);
         executorService = Executors.newFixedThreadPool(threadsNum);
@@ -142,8 +125,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
             public void run() {
                 logger.info("Start to extract metadata.");
                 try {
-                    String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project",
-                            projectNames, "-compress", "false", "-includeJobs", "false", "-submodule", "true" };
+                    String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-includeJobs", "false", "-submodule", "true" };
                     CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
                     logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
                     cubeMetaExtractor.execute(cubeMetaArgs);
@@ -160,8 +142,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 public void run() {
                     logger.info("Start to extract jobs.");
                     try {
-                        String[] jobArgs = { "-destDir", new File(exportDir, "jobs").getAbsolutePath(), "-period",
-                                Integer.toString(period), "-compress", "false", "-submodule", "true" };
+                        String[] jobArgs = { "-destDir", new File(exportDir, "jobs").getAbsolutePath(), "-period", Integer.toString(period), "-compress", "false", "-submodule", "true" };
                         JobInstanceExtractor jobInstanceExtractor = new JobInstanceExtractor();
                         jobInstanceExtractor.execute(jobArgs);
                     } catch (Exception e) {
@@ -178,8 +159,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 public void run() {
                     logger.info("Start to extract HBase usage.");
                     try {
-                        String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project",
-                                projectNames, "-compress", "false", "-submodule", "true" };
+                        String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-submodule", "true" };
                         HBaseUsageExtractor hBaseUsageExtractor = new HBaseUsageExtractor();
                         logger.info("HBaseUsageExtractor args: " + Arrays.toString(hbaseArgs));
                         hBaseUsageExtractor.execute(hbaseArgs);
@@ -200,8 +180,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                         File destConfDir = new File(exportDir, "conf");
                         FileUtils.forceMkdir(destConfDir);
                         File srcConfDir = new File(ToolUtil.getConfFolder());
-                        Preconditions.checkState(srcConfDir.exists(),
-                                "Cannot find config dir: " + srcConfDir.getAbsolutePath());
+                        Preconditions.checkState(srcConfDir.exists(), "Cannot find config dir: " + srcConfDir.getAbsolutePath());
                         File[] confFiles = srcConfDir.listFiles();
                         if (confFiles != null) {
                             for (File confFile : confFiles) {
@@ -221,8 +200,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
                 @Override
                 public void run() {
                     try {
-                        String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(),
-                                "-compress", "false", "-submodule", "true" };
+                        String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
                         ClientEnvExtractor clientEnvExtractor = new ClientEnvExtractor();
                         logger.info("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
                         clientEnvExtractor.execute(clientArgs);
@@ -239,8 +217,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
             public void run() {
                 logger.info("Start to extract logs.");
                 try {
-                    String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-logPeriod",
-                            Integer.toString(period), "-compress", "false", "-submodule", "true" };
+                    String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-logPeriod", Integer.toString(period), "-compress", "false", "-submodule", "true" };
                     KylinLogExtractor logExtractor = new KylinLogExtractor();
                     logger.info("KylinLogExtractor args: " + Arrays.toString(logsArgs));
                     logExtractor.execute(logsArgs);
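
The DiagnosisInfoCLI hunks above all lean on the same two idioms: declare an optional flag with Commons CLI's OptionBuilder, then fall back to a default when the flag is absent via "hasOption(...) ? Boolean.valueOf(getOptionValue(...)) : true". A minimal self-contained sketch of both idioms against the Apache Commons CLI 1.x API (class and flag names here are illustrative, not Kylin code):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionBuilder;
    import org.apache.commons.cli.Options;

    public class OptionDefaultSketch {

        // OptionBuilder mutates shared static state, which is why every
        // declaration above carries @SuppressWarnings("static-access").
        @SuppressWarnings("static-access")
        private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg()
                .isRequired(false).withDescription("Include client info. Default true.").create("includeClient");

        public static void main(String[] args) throws Exception {
            Options options = new Options();
            options.addOption(OPTION_INCLUDE_CLIENT);

            CommandLineParser parser = new GnuParser();
            CommandLine cmd = parser.parse(options, args);

            // Absent flag -> default true; present flag -> parse its value.
            boolean includeClient = cmd.hasOption("includeClient")
                    ? Boolean.valueOf(cmd.getOptionValue("includeClient")) : true;
            System.out.println("includeClient = " + includeClient);
        }
    }

Note that Boolean.valueOf treats any value other than "true" (case-insensitive) as false, so "-includeClient yes" disables the extraction just as "-includeClient false" does.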

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index 3df0cbf..f52fc3e 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -118,8 +118,7 @@ public class ExtendCubeToHybridCLI {
     }
 
     public void createFromCube(String projectName, String cubeName, String partitionDateStr) throws Exception {
-        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date["
-                + partitionDateStr + "].");
+        logger.info("Create hybrid for cube[" + cubeName + "], project[" + projectName + "], partition_date[" + partitionDateStr + "].");
 
         CubeInstance cubeInstance = cubeManager.getCube(cubeName);
         if (!validateCubeInstance(cubeInstance)) {
@@ -153,8 +152,7 @@ public class ExtendCubeToHybridCLI {
         CubeSegment currentSeg = null;
         while (segmentIterator.hasNext()) {
             currentSeg = segmentIterator.next();
-            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate
-                    || currentSeg.getDateRangeEnd() > partitionDate)) {
+            if (partitionDateStr != null && (currentSeg.getDateRangeStart() >= partitionDate || currentSeg.getDateRangeEnd() > partitionDate)) {
                 segmentIterator.remove();
                 logger.info("CubeSegment[" + currentSeg + "] was removed.");
             }
@@ -197,11 +195,9 @@ public class ExtendCubeToHybridCLI {
         List<RealizationEntry> realizationEntries = Lists.newArrayListWithCapacity(2);
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, cubeInstance.getName()));
         realizationEntries.add(RealizationEntry.create(RealizationType.CUBE, newCubeInstance.getName()));
-        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()),
-                realizationEntries);
+        HybridInstance hybridInstance = HybridInstance.create(kylinConfig, renameHybrid(cubeInstance.getName()), realizationEntries);
         store.putResource(hybridInstance.getResourcePath(), hybridInstance, HybridManager.HYBRID_SERIALIZER);
-        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID,
-                hybridInstance.getName(), projectName, owner);
+        ProjectManager.getInstance(kylinConfig).moveRealizationToProject(RealizationType.HYBRID, hybridInstance.getName(), projectName, owner);
         logger.info("HybridInstance was saved at: " + hybridInstance.getResourcePath());
 
         // copy Acl from old cube to new cube
@@ -238,8 +234,7 @@ public class ExtendCubeToHybridCLI {
         String projUUID = project.getUuid();
         Table aclHtable = null;
         try {
-            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl())
-                    .getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
+            aclHtable = HBaseConnection.get(kylinConfig.getStorageUrl()).getTable(TableName.valueOf(kylinConfig.getMetadataUrlPrefix() + "_acl"));
 
             // cube acl
             Result result = aclHtable.get(new Get(Bytes.toBytes(origCubeId)));
@@ -250,10 +245,8 @@ public class ExtendCubeToHybridCLI {
                     byte[] value = CellUtil.cloneValue(cell);
 
                     // use the target project uuid as the parent
-                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY)
-                            && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                        String valueString = "{\"id\":\"" + projUUID
-                                + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                    if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                        String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
                         value = Bytes.toBytes(valueString);
                     }
                     Put put = new Put(Bytes.toBytes(newCubeId));
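
The ACL hunk above reads the old cube's row with a Get, rewrites the parent-project column, and re-keys each cell under the new cube id with a Put. A hedged sketch of that row-copy idiom against the HBase 1.x client API (the same Table/TableName/CellUtil classes the hunk uses); connection setup, table name, and row keys are placeholders for illustration:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowCopySketch {
        public static void copyRow(Table table, String srcKey, String dstKey) throws IOException {
            Result result = table.get(new Get(Bytes.toBytes(srcKey)));
            if (result.isEmpty()) {
                return; // nothing to copy
            }
            Put put = new Put(Bytes.toBytes(dstKey));
            for (Cell cell : result.rawCells()) {
                // Clone family/qualifier/value out of the shared backing buffer.
                put.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell), CellUtil.cloneValue(cell));
            }
            table.put(put);
        }

        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            // Placeholder table name, standing in for metadataUrlPrefix + "_acl".
            try (Connection conn = ConnectionFactory.createConnection(conf);
                    Table table = conn.getTable(TableName.valueOf("kylin_metadata_acl"))) {
                copyRow(table, "orig-cube-uuid", "new-cube-uuid");
            }
        }
    }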

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
index 8d0da59..0d8c08f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/HBaseUsageExtractor.java
@@ -53,11 +53,9 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
 
     private static final Logger logger = LoggerFactory.getLogger(HBaseUsageExtractor.class);
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify realizations in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");
 
     private List<String> htables = Lists.newArrayList();
     private Configuration conf;
@@ -103,7 +101,7 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
 
         if (optionsHelper.hasOption(OPTION_PROJECT)) {
             String projectNames = optionsHelper.getOptionValue(OPTION_PROJECT);
-            for (String projectName : projectNames.split(",")) {
+            for (String projectName: projectNames.split(",")) {
                 ProjectInstance projectInstance = projectManager.getProject(projectName);
                 if (projectInstance == null) {
                     throw new IllegalArgumentException("Project " + projectName + " does not exist");
@@ -190,11 +188,9 @@ public class HBaseUsageExtractor extends AbstractInfoExtractor {
             File hdfsDir = new File(dest, "hdfs");
             FileUtils.forceMkdir(hdfsDir);
             CliCommandExecutor cliCommandExecutor = kylinConfig.getCliCommandExecutor();
-            String output = cliCommandExecutor
-                    .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/KYLIN_*").getSecond();
+            String output = cliCommandExecutor.execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/KYLIN_*").getSecond();
             FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset());
-            output = cliCommandExecutor
-                    .execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/kylin_*").getSecond();
+            output = cliCommandExecutor.execute("hadoop fs -ls -R " + conf.get("hbase.rootdir") + "/data/default/kylin_*").getSecond();
             FileUtils.writeStringToFile(new File(hdfsDir, "hdfs-files.list"), output, Charset.defaultCharset(), true);
         } catch (Exception e) {
             logger.warn("HBase hdfs status fetch failed: ", e);
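
HBaseUsageExtractor shells out through Kylin's internal CliCommandExecutor, which, judging from the call sites above, yields the command's output via getSecond(). A dependency-free stand-in for that call using ProcessBuilder, assuming a Unix shell; the command string is illustrative:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.Charset;

    public class ShellOutputSketch {
        public static String execute(String command) throws IOException, InterruptedException {
            // Assumes a Unix environment with /bin/bash available.
            ProcessBuilder pb = new ProcessBuilder("/bin/bash", "-c", command);
            pb.redirectErrorStream(true); // merge stderr into stdout
            Process process = pb.start();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            try (InputStream in = process.getInputStream()) {
                byte[] chunk = new byte[4096];
                int n;
                while ((n = in.read(chunk)) != -1) {
                    buffer.write(chunk, 0, n);
                }
            }
            int exitCode = process.waitFor();
            if (exitCode != 0) {
                throw new IOException("Command failed with exit code " + exitCode + ": " + command);
            }
            return buffer.toString(Charset.defaultCharset().name());
        }

        public static void main(String[] args) throws Exception {
            // Illustrative command; the extractor above runs "hadoop fs -ls -R ...".
            System.out.println(execute("ls -l /tmp"));
        }
    }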

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
index 85dfa98..04dbef7 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobDiagnosisInfoCLI.java
@@ -46,30 +46,19 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final Logger logger = LoggerFactory.getLogger(JobDiagnosisInfoCLI.class);
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true)
-            .withDescription("specify the Job ID to extract information. ").create("jobId");
+    private static final Option OPTION_JOB_ID = OptionBuilder.withArgName("jobId").hasArg().isRequired(true).withDescription("specify the Job ID to extract information. ").create("jobId");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg()
-            .isRequired(false)
-            .withDescription("set this to true if want to extract related cube info too. Default true")
-            .create("includeCube");
+    private static final Option OPTION_INCLUDE_CUBE = OptionBuilder.withArgName("includeCube").hasArg().isRequired(false).withDescription("set this to true if want to extract related cube info too. Default true").create("includeCube");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg()
-            .isRequired(false)
-            .withDescription("set this to true if want to extract related yarn logs too. Default true")
-            .create("includeYarnLogs");
+    private static final Option OPTION_INCLUDE_YARN_LOGS = OptionBuilder.withArgName("includeYarnLogs").hasArg().isRequired(false).withDescription("set this to true if want to extract related yarn logs too. Default true").create("includeYarnLogs");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg()
-            .isRequired(false).withDescription("Specify whether to include client info to extract. Default true.")
-            .create("includeClient");
+    private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg().isRequired(false).withDescription("Specify whether to include client info to extract. Default true.").create("includeClient");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg()
-            .isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.")
-            .create("includeConf");
+    private static final Option OPTION_INCLUDE_CONF = OptionBuilder.withArgName("includeConf").hasArg().isRequired(false).withDescription("Specify whether to include conf files to extract. Default true.").create("includeConf");
 
     List<String> requiredResources = Lists.newArrayList();
     List<String> yarnLogsResources = Lists.newArrayList();
@@ -99,14 +88,10 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
     @Override
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         String kylinJobId = optionsHelper.getOptionValue(OPTION_JOB_ID);
-        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
-        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
-        boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
-        boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF)
-                ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
+        boolean includeCube = optionsHelper.hasOption(OPTION_INCLUDE_CUBE) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CUBE)) : true;
+        boolean includeYarnLogs = optionsHelper.hasOption(OPTION_INCLUDE_YARN_LOGS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_YARN_LOGS)) : true;
+        boolean includeClient = optionsHelper.hasOption(OPTION_INCLUDE_CLIENT) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CLIENT)) : true;
+        boolean includeConf = optionsHelper.hasOption(OPTION_INCLUDE_CONF) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_CONF)) : true;
 
         // dump job output
         logger.info("Start to dump job output");
@@ -128,8 +113,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
             if (!StringUtils.isEmpty(cubeName)) {
                 File metaDir = new File(exportDir, "cube");
                 FileUtils.forceMkdir(metaDir);
-                String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", new File(metaDir, cubeName).getAbsolutePath(),
-                        "-includeJobs", "false", "-compress", "false", "-submodule", "true" };
+                String[] cubeMetaArgs = { "-cube", cubeName, "-destDir", new File(metaDir, cubeName).getAbsolutePath(), "-includeJobs", "false", "-compress", "false", "-submodule", "true" };
                 logger.info("Start to extract related cube: " + StringUtils.join(cubeMetaArgs));
                 CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
                 logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
@@ -149,8 +133,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         }
 
         if (includeClient) {
-            String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false",
-                    "-submodule", "true" };
+            String[] clientArgs = { "-destDir", new File(exportDir, "client").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
             ClientEnvExtractor clientEnvExtractor = new ClientEnvExtractor();
             logger.info("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
             clientEnvExtractor.execute(clientArgs);
@@ -167,8 +150,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         }
 
         // export kylin logs
-        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false",
-                "-submodule", "true" };
+        String[] logsArgs = { "-destDir", new File(exportDir, "logs").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
         KylinLogExtractor logExtractor = new KylinLogExtractor();
         logger.info("KylinLogExtractor args: " + Arrays.toString(logsArgs));
         logExtractor.execute(logsArgs);
@@ -212,8 +194,7 @@ public class JobDiagnosisInfoCLI extends AbstractInfoExtractor {
         if (jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
             String mrJobId = jobInfo.get(ExecutableConstants.MR_JOB_ID);
             FileUtils.forceMkdir(destDir);
-            String[] mrJobArgs = { "-mrJobId", mrJobId, "-destDir", destDir.getAbsolutePath(), "-compress", "false",
-                    "-submodule", "true" };
+            String[] mrJobArgs = { "-mrJobId", mrJobId, "-destDir", destDir.getAbsolutePath(), "-compress", "false", "-submodule", "true" };
             new MrJobInfoExtractor().execute(mrJobArgs);
         }
     }
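
Each sub-extractor above is driven by composing a flat String[] of flags ("-destDir", "-compress false", "-submodule true") and handing it to execute(...). A small sketch of composing such an argument vector with a List instead of an array literal, so optional flags can be appended conditionally before the final toArray(); paths and names are illustrative:

    import java.io.File;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ExtractorArgsSketch {
        public static String[] buildArgs(File exportDir, String submodule, boolean compress) {
            List<String> args = new ArrayList<>();
            args.add("-destDir");
            args.add(new File(exportDir, submodule).getAbsolutePath());
            args.add("-compress");
            args.add(Boolean.toString(compress));
            args.add("-submodule");
            args.add("true");
            return args.toArray(new String[args.size()]);
        }

        public static void main(String[] args) {
            String[] clientArgs = buildArgs(new File("/tmp/diag"), "client", false);
            // Mirrors the "ClientEnvExtractor args: ..." logging seen above.
            System.out.println("ClientEnvExtractor args: " + Arrays.toString(clientArgs));
        }
    }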

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
index 70aaac2..068dbda 100644
--- a/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java
@@ -55,15 +55,11 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
     private static final int DEFAULT_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false)
-            .withDescription("Specify jobs in which project to extract").create("project");
+    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify jobs in which project to extract").create("project");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false)
-            .withDescription("Specify jobs related to which cube to extract").create("cube");
+    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify jobs related to which cube to extract").create("cube");
     @SuppressWarnings("static-access")
-    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin jobs to extract. Default " + DEFAULT_PERIOD + ".")
-            .create("period");
+    private static final Option OPTION_PERIOD = OptionBuilder.withArgName("period").hasArg().isRequired(false).withDescription("specify how many days of kylin jobs to extract. Default " + DEFAULT_PERIOD + ".").create("period");
 
     KylinConfig config;
     ProjectManager projectManager;
@@ -85,8 +81,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         String cube = optionsHelper.hasOption(OPTION_CUBE) ? optionsHelper.getOptionValue(OPTION_CUBE) : null;
         String project = optionsHelper.hasOption(OPTION_PROJECT) ? optionsHelper.getOptionValue(OPTION_PROJECT) : null;
-        int period = optionsHelper.hasOption(OPTION_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
+        int period = optionsHelper.hasOption(OPTION_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_PERIOD)) : DEFAULT_PERIOD;
 
         long endTime = System.currentTimeMillis();
         long startTime = endTime - period * 24 * 3600 * 1000; // time in Millis
@@ -112,8 +107,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
                         shouldExtract = true;
                     } else {
                         ProjectInstance projectInstance = projectManager.getProject(project);
-                        if (projectInstance != null
-                                && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
+                        if (projectInstance != null && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
                             shouldExtract = true;
                         }
                     }
@@ -141,8 +135,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
         result.setMrWaiting(AbstractExecutable.getExtraInfoAsLong(output, CubingJob.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
         result.setExecStartTime(AbstractExecutable.getStartTime(output));
         result.setExecEndTime(AbstractExecutable.getEndTime(output));
-        result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output),
-                AbstractExecutable.getEndTime(output)) / 1000);
+        result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output), AbstractExecutable.getEndTime(output)) / 1000);
         for (int i = 0; i < cubeJob.getTasks().size(); ++i) {
             AbstractExecutable task = cubeJob.getTasks().get(i);
             result.addStep(parseToJobStep(task, i, outputs.get(task.getId())));
@@ -187,9 +180,7 @@ public class JobInstanceExtractor extends AbstractInfoExtractor {
         }
         if (task instanceof MapReduceExecutable) {
             result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
-            result.setExecWaitTime(
-                    AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L)
-                            / 1000);
+            result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
         }
         if (task instanceof HadoopShellExecutable) {
             result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
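
One detail worth flagging in the unchanged context of this file: the extraction window is computed as "endTime - period * 24 * 3600 * 1000" with an int period. Since 24 * 3600 * 1000 is 86,400,000, the product is evaluated in 32-bit arithmetic and wraps negative once period reaches 25, pushing the window start into the future. The default period of 3 is unaffected; only larger -period values trip it. A worked sketch of the hazard and two safe forms:

    import java.util.concurrent.TimeUnit;

    public class TimeWindowSketch {
        public static void main(String[] args) {
            int period = 25;
            long endTime = System.currentTimeMillis();

            // int math wraps negative, so subtracting it lands in the future.
            long overflowed = endTime - period * 24 * 3600 * 1000;
            // A single long literal forces 64-bit arithmetic throughout.
            long widened = endTime - period * 24L * 3600 * 1000;
            // TimeUnit is the clearest form and takes a long to begin with.
            long viaTimeUnit = endTime - TimeUnit.DAYS.toMillis(period);

            System.out.println("int arithmetic:  " + overflowed);
            System.out.println("long arithmetic: " + widened);
            System.out.println("TimeUnit:        " + viaTimeUnit);
        }
    }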

http://git-wip-us.apache.org/repos/asf/kylin/blob/19585846/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
index 118eb66..6555c4d 100644
--- a/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
+++ b/tool/src/main/java/org/apache/kylin/tool/KylinLogExtractor.java
@@ -43,9 +43,7 @@ public class KylinLogExtractor extends AbstractInfoExtractor {
     private static final int DEFAULT_LOG_PERIOD = 3;
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false)
-            .withDescription("specify how many days of kylin logs to extract. Default " + DEFAULT_LOG_PERIOD + ".")
-            .create("logPeriod");
+    private static final Option OPTION_LOG_PERIOD = OptionBuilder.withArgName("logPeriod").hasArg().isRequired(false).withDescription("specify how many days of kylin logs to extract. Default " + DEFAULT_LOG_PERIOD + ".").create("logPeriod");
 
     KylinConfig config;
 
@@ -76,8 +74,7 @@ public class KylinLogExtractor extends AbstractInfoExtractor {
     protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         beforeExtract();
 
-        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD)
-                ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
+        int logPeriod = optionsHelper.hasOption(OPTION_LOG_PERIOD) ? Integer.valueOf(optionsHelper.getOptionValue(OPTION_LOG_PERIOD)) : DEFAULT_LOG_PERIOD;
 
         if (logPeriod < 1) {
             logger.warn("No logs to extract.");